# Record the R version, platform, locale, and loaded package versions
# used to render this report (for reproducibility of results).
sessionInfo()
## R version 3.5.1 (2018-07-02)
## Platform: x86_64-w64-mingw32/x64 (64-bit)
## Running under: Windows 10 x64 (build 17134)
## 
## Matrix products: default
## 
## locale:
## [1] LC_COLLATE=English_United States.1252 
## [2] LC_CTYPE=English_United States.1252   
## [3] LC_MONETARY=English_United States.1252
## [4] LC_NUMERIC=C                          
## [5] LC_TIME=English_United States.1252    
## 
## attached base packages:
## [1] stats     graphics  grDevices utils     datasets  methods   base     
## 
## loaded via a namespace (and not attached):
##  [1] compiler_3.5.1  magrittr_1.5    tools_3.5.1     htmltools_0.3.6
##  [5] yaml_2.2.0      Rcpp_1.0.0      stringi_1.2.4   rmarkdown_1.11 
##  [9] knitr_1.20      stringr_1.3.1   digest_0.6.18   evaluate_0.12

User Inputs

# Name of the response variable to model/predict, taken from the
# report parameters.
output.var <- params$output.var

# Transformation flags for the response; at most one is enabled,
# selected by params$trans:
#   1 = absolute-value transform, 2 = log transform, 3 = normalized scale,
#   anything else = no transformation.
transform.abs <- FALSE
log.pred <- FALSE
norm.pred <- FALSE
if (params$trans == 1) {
  # Bug fix: the original line read `transform.abs == TRUE`, a comparison
  # whose result was discarded, so the flag could never be set. Assign it.
  transform.abs <- TRUE
} else if (params$trans == 2) {
  log.pred <- TRUE
} else if (params$trans == 3) {
  norm.pred <- TRUE
} else {
  message("You have chosen no transformation")
}

# Copy report parameters into top-level flags used throughout the script.
eda <- params$eda

# Toggles for the plain (non-caret) model-selection algorithms.
algo.forward  <- params$algo.forward
algo.backward <- params$algo.backward
algo.stepwise <- params$algo.stepwise
algo.LASSO    <- params$algo.LASSO
algo.LARS     <- params$algo.LARS

# Toggles for the caret-based counterparts of the same algorithms.
algo.forward.caret  <- params$algo.forward.caret
algo.backward.caret <- params$algo.backward.caret
algo.stepwise.caret <- params$algo.stepwise.caret
algo.LASSO.caret    <- params$algo.LASSO.caret
algo.LARS.caret     <- params$algo.LARS.caret

# Echo the full parameter list into the rendered report so it documents
# exactly which settings produced these results.
message("Parameters used for training/prediction: ")
## Parameters used for training/prediction:
str(params)
## List of 13
##  $ output.var         : chr "y3"
##  $ trans              : int 2
##  $ eda                : logi FALSE
##  $ algo.forward       : logi FALSE
##  $ algo.backward      : logi FALSE
##  $ algo.stepwise      : logi FALSE
##  $ algo.LASSO         : logi FALSE
##  $ algo.LARS          : logi FALSE
##  $ algo.forward.caret : logi TRUE
##  $ algo.backward.caret: logi TRUE
##  $ algo.stepwise.caret: logi TRUE
##  $ algo.LASSO.caret   : logi TRUE
##  $ algo.LARS.caret    : logi TRUE
# Setup Labels
# label.names          = name of the response column used for modeling
# alt.scale.label.name = the same response on the alternate scale:
#   - if predicting on the log scale, the alternate is the normal scale
#   - if predicting on the normal scale, the alternate is the log scale
# norm.pred is tested first: the original code used three independent
# `if` blocks, so when both flags were TRUE the norm branch (being last)
# won — this chain preserves that precedence in every case.
if (norm.pred) {
  label.names <- paste0("norm.", output.var)
  alt.scale.label.name <- output.var
} else if (log.pred) {
  label.names <- paste0("log.", output.var)
  alt.scale.label.name <- output.var
} else {
  # No transformation: model the raw variable; log scale is the alternate.
  label.names <- output.var
  alt.scale.label.name <- paste0("log.", output.var)
}

Prepare Data

Read and Clean Features

# Load the feature matrix, plus a higher-precision export of the same
# data, then print any numeric differences between the two versions.
features <- read.csv("../../Data/features.csv")
features.highprec <- read.csv("../../Data/features_highprec.csv")
all.equal(features, features.highprec)
##  [1] "Component \"x11\": Mean relative difference: 0.001401482"     
##  [2] "Component \"stat9\": Mean relative difference: 0.0002946299"  
##  [3] "Component \"stat12\": Mean relative difference: 0.0005151515" 
##  [4] "Component \"stat13\": Mean relative difference: 0.001354369"  
##  [5] "Component \"stat18\": Mean relative difference: 0.0005141104" 
##  [6] "Component \"stat22\": Mean relative difference: 0.001135977"  
##  [7] "Component \"stat25\": Mean relative difference: 0.0001884615" 
##  [8] "Component \"stat29\": Mean relative difference: 0.001083691"  
##  [9] "Component \"stat36\": Mean relative difference: 0.00021513"   
## [10] "Component \"stat37\": Mean relative difference: 0.0004578125" 
## [11] "Component \"stat43\": Mean relative difference: 0.0003473684" 
## [12] "Component \"stat45\": Mean relative difference: 0.0002951699" 
## [13] "Component \"stat46\": Mean relative difference: 0.0009745763" 
## [14] "Component \"stat47\": Mean relative difference: 8.829902e-05" 
## [15] "Component \"stat55\": Mean relative difference: 0.001438066"  
## [16] "Component \"stat57\": Mean relative difference: 0.0001056911" 
## [17] "Component \"stat58\": Mean relative difference: 0.0004882261" 
## [18] "Component \"stat60\": Mean relative difference: 0.0002408377" 
## [19] "Component \"stat62\": Mean relative difference: 0.0004885106" 
## [20] "Component \"stat66\": Mean relative difference: 1.73913e-06"  
## [21] "Component \"stat67\": Mean relative difference: 0.0006265823" 
## [22] "Component \"stat73\": Mean relative difference: 0.003846154"  
## [23] "Component \"stat75\": Mean relative difference: 0.002334906"  
## [24] "Component \"stat83\": Mean relative difference: 0.0005628415" 
## [25] "Component \"stat86\": Mean relative difference: 0.0006104418" 
## [26] "Component \"stat94\": Mean relative difference: 0.001005115"  
## [27] "Component \"stat97\": Mean relative difference: 0.0003551913" 
## [28] "Component \"stat98\": Mean relative difference: 0.0006157635" 
## [29] "Component \"stat106\": Mean relative difference: 0.0008267717"
## [30] "Component \"stat109\": Mean relative difference: 0.0005121359"
## [31] "Component \"stat110\": Mean relative difference: 0.0007615527"
## [32] "Component \"stat111\": Mean relative difference: 0.001336134" 
## [33] "Component \"stat114\": Mean relative difference: 7.680492e-05"
## [34] "Component \"stat117\": Mean relative difference: 0.0002421784"
## [35] "Component \"stat122\": Mean relative difference: 0.0006521084"
## [36] "Component \"stat123\": Mean relative difference: 8.333333e-05"
## [37] "Component \"stat125\": Mean relative difference: 0.002385135" 
## [38] "Component \"stat130\": Mean relative difference: 0.001874016" 
## [39] "Component \"stat132\": Mean relative difference: 0.0003193182"
## [40] "Component \"stat135\": Mean relative difference: 0.0001622517"
## [41] "Component \"stat136\": Mean relative difference: 7.722008e-05"
## [42] "Component \"stat138\": Mean relative difference: 0.0009739953"
## [43] "Component \"stat143\": Mean relative difference: 0.0004845361"
## [44] "Component \"stat146\": Mean relative difference: 0.0005821596"
## [45] "Component \"stat148\": Mean relative difference: 0.0005366922"
## [46] "Component \"stat153\": Mean relative difference: 0.0001557522"
## [47] "Component \"stat154\": Mean relative difference: 0.001351916" 
## [48] "Component \"stat157\": Mean relative difference: 0.0005427928"
## [49] "Component \"stat162\": Mean relative difference: 0.002622951" 
## [50] "Component \"stat167\": Mean relative difference: 0.0005905172"
## [51] "Component \"stat168\": Mean relative difference: 0.0002791096"
## [52] "Component \"stat169\": Mean relative difference: 0.0004121827"
## [53] "Component \"stat170\": Mean relative difference: 0.0004705882"
## [54] "Component \"stat174\": Mean relative difference: 0.0003822894"
## [55] "Component \"stat179\": Mean relative difference: 0.0008286604"
## [56] "Component \"stat184\": Mean relative difference: 0.0007526718"
## [57] "Component \"stat187\": Mean relative difference: 0.0005122768"
## [58] "Component \"stat193\": Mean relative difference: 4.215116e-05"
## [59] "Component \"stat199\": Mean relative difference: 0.002155844" 
## [60] "Component \"stat203\": Mean relative difference: 0.0003738318"
## [61] "Component \"stat213\": Mean relative difference: 0.000667676" 
## [62] "Component \"stat215\": Mean relative difference: 0.0003997955"
# Preview the first rows of the standard-precision feature table.
head(features)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10      x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.05e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.03e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.06e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.47e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.01e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.07e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Preview the first rows of the high-precision feature table, for visual
# comparison with the standard-precision `features` printed earlier.
head(features.highprec)
##     JobName        x1       x2       x3        x4       x5        x6
## 1 Job_00001 2.0734508 4.917267 19.96188  3.520878 7.861051 1.6067589
## 2 Job_00002 2.2682543 4.955773 19.11939 19.763031 6.931355 1.3622041
## 3 Job_00003 1.7424456 2.059819 13.37912 38.829132 6.274053 2.0529845
## 4 Job_00004 0.7873555 2.613983 17.23044 64.402557 5.377652 0.9067419
## 5 Job_00005 2.3342753 4.299076 14.64883 52.537304 6.793368 2.4605792
## 6 Job_00006 1.2365089 2.795370 11.13127 96.819939 6.583971 2.3510606
##         x7        x8       x9       x10          x11      x12       x13
## 1 2.979479  8.537228 1.103368 4.6089458 1.050025e-07 7.995825 13.215498
## 2 2.388119  6.561461 0.588572 1.0283282 1.034518e-07 7.486966 22.557224
## 3 2.043592 10.275595 4.834385 4.3872848 1.062312e-07 6.350142 15.049810
## 4 2.395118 13.487331 3.340190 4.5053501 9.471887e-08 9.548698 17.170635
## 5 2.891535  9.362389 1.246039 1.7333300 1.010552e-07 9.596095  5.794567
## 6 1.247838  7.033354 1.852231 0.4839371 1.071662e-07 3.810983 23.863169
##        x14       x15       x16      x17      x18      x19      x20
## 1 4.377983 0.2370623  6.075459 3.988347 4.767475 2.698775 1.035893
## 2 2.059315 0.5638121  6.903891 4.152054 6.849232 9.620731 1.915288
## 3 3.260057 2.0603445  8.424065 4.489893 3.493591 4.715386 1.558103
## 4 3.093478 1.8806034 11.189792 2.134271 5.588357 5.107871 1.489588
## 5 3.943076 1.5820830  7.096742 3.563378 7.765610 1.360272 1.240283
## 6 1.280562 1.1733382  7.062051 1.341864 7.748325 5.009365 1.725179
##        x21      x22      x23      stat1      stat2      stat3      stat4
## 1 42.36548 1.356213 2.699796  2.3801832  0.1883335 -1.2284011 -0.5999233
## 2 26.63295 4.053961 2.375127 -1.4069480  1.8140973  1.6204884  2.6422672
## 3 20.09693 3.079888 4.488420 -0.7672566 -0.1230289  1.1415752  2.9805934
## 4 32.60415 1.355396 3.402398  0.4371202 -1.9355906  0.9028624 -1.6025400
## 5 44.58361 1.940301 2.249011  2.4492466 -0.6172000 -2.5520642 -2.1485929
## 6 28.75102 2.500499 5.563972 -1.7899084  1.8853619  2.4154840 -2.6022179
##          stat5      stat6      stat7      stat8      stat9      stat10
## 1  0.148893163 -0.6622978 -2.4851868  0.3647782  2.5364335  2.92067981
## 2  1.920768980  1.7411555 -1.9599979 -2.0190558 -1.3732762 -0.31642506
## 3  2.422584300 -0.4166040  2.2205689 -2.6741531  0.4844292  2.73379230
## 4 -0.001795933 -0.6946563 -0.3693534 -0.9709467  1.7960306  0.74771154
## 5 -2.311132430 -1.0166832  2.7269876  1.5424492 -1.3156369 -0.09767897
## 6 -1.785491470 -1.8599915  1.4875095  2.0188572 -1.4892503 -1.41103566
##       stat11    stat12     stat13     stat14     stat15     stat16
## 1 -2.3228905 -2.480567 -0.6335157 -0.3650149 -0.5322812  0.6029300
## 2 -0.8547903  1.119316  0.7227427  0.2121097 -0.1452281 -2.0361528
## 3 -2.1821580  2.865401 -2.9756081  2.9871745  1.9539525 -1.8857163
## 4  1.3982378  1.856765 -1.0379983  2.3341896  2.3057184 -2.8947697
## 5  0.9567220  2.567549  0.3184886  1.0307668  0.1644241 -0.6613821
## 6  0.5341771 -1.461822  0.4402476 -1.9282095 -0.3680157  1.8188807
##        stat17     stat18     stat19     stat20     stat21     stat22
## 1 -1.04516208  2.3544915  2.4049001  0.2633883 -0.9788178  1.7868229
## 2  0.09513074  0.4727738  1.8899702  2.7892542 -1.3919091 -1.7198164
## 3  0.40285346  1.4655282 -1.4952788  2.9162340 -2.3893208  2.8161423
## 4  2.97446084  2.3896182  2.3083484 -1.1894441 -2.1982553  1.3666242
## 5 -0.98465055  0.6900643  1.5894209 -2.1204538  1.7961155 -0.9362189
## 6 -1.45726359 -2.1139548 -0.3964904  1.1764175 -2.9100556 -2.1359294
##       stat23     stat24     stat25    stat26     stat27      stat28
## 1 -2.3718851  2.8580718 -0.4719713 -2.817086 -0.9518474  2.88892484
## 2 -2.3293245  1.5577759 -1.9569720  1.554194 -0.5081459 -1.58715141
## 3 -2.5402296  0.1422861  0.3572798 -1.051886 -2.1541717  0.03074004
## 4 -1.9679050 -1.4077642  2.5097435  1.683121 -0.2549745 -2.90384054
## 5  2.0523429 -2.2084844 -1.9280857 -2.116736  1.8180779 -1.42167580
## 6  0.2184991 -0.7599817  2.6880329 -2.903350 -1.0733233 -2.92416644
##       stat29     stat30     stat31     stat32      stat33     stat34
## 1  0.7991088 -2.0059092 -0.2461502  0.6482101 -2.87462163 -0.3601543
## 2  1.9758110 -0.3874187  1.3566630  2.6493473  2.28463054  1.8591728
## 3 -0.4460218  1.0279679  1.3998452 -1.0183365  1.41109037 -2.4183984
## 4  1.0571996  2.5588036 -2.9830337 -1.1299983  0.05470414 -1.5566561
## 5  0.8854889  2.2774174  2.6499031  2.3053405 -2.39148426 -1.8272992
## 6 -0.8405267  0.1311945  0.4321289 -2.9622040 -2.55387473  2.6396458
##       stat35     stat36     stat37     stat38     stat39     stat40
## 1  2.4286051 -0.5420244 -2.6782637 -2.8874269 -0.8945006  1.1749642
## 2  1.3709245 -1.3714181  1.3901204  1.2273489 -0.8934880  1.0540369
## 3 -0.9805572  2.0571353  0.8845031  2.0574493  1.1222047  1.8528618
## 4  1.0969149 -2.2820673  1.8852408  0.5391517  2.7334342 -0.4372566
## 5 -1.0971669  1.4867796 -2.3738465 -0.3743561  1.4266498  1.2551680
## 6  0.4584349 -2.2696617 -0.9935142 -0.5350499 -0.7874799  2.0009417
##       stat41     stat42     stat43     stat44     stat45      stat46
## 1 -1.0474428 -1.3909023 2.54110503 -1.4320793  0.6298335 -2.09296608
## 2  2.5380247  1.6476108 0.44128850 -2.5049477  1.2726039  1.72492969
## 3  1.1477574  0.2288794 0.08891252  2.3044751 -0.7735722 -0.07302936
## 4 -1.3808300 -2.7900956 2.38297582  0.1686397 -2.1591296  1.60828602
## 5  0.2257536  1.9542116 2.66429019  0.8026123 -1.5521187  1.61751962
## 6 -1.3364114 -2.2898803 2.80735397 -0.8413086  1.0057797 -1.50653386
##       stat47     stat48     stat49     stat50     stat51     stat52
## 1 -2.8318939  2.1445766  0.5668035  0.1544579  0.6291955  2.2197027
## 2 -0.5804687 -1.3689737  1.4908396  1.2465997  0.8896304 -2.6024318
## 3  0.7918019  1.5712964  1.1038082 -0.2545658 -2.1662638  0.2660159
## 4 -1.8894132  0.5680230 -0.7023218 -0.3972188  0.1578027  2.1770194
## 5  2.1088455 -2.7195437  2.1961412 -0.2615084  1.2109556  0.8260623
## 6 -1.4400891 -0.9421459 -1.7324599 -2.1720727 -2.8129435  0.6958785
##      stat53     stat54      stat55     stat56     stat57      stat58
## 1  2.176805  0.5546907 -2.19704103 -0.2884173  1.3232913 -1.32824039
## 2 -2.107441  1.3864788  0.08781975  1.9998228  0.8014438 -0.26979154
## 3  1.234197  2.1337581  1.65231645 -0.4388691 -0.1811156  2.11277962
## 4  2.535406 -2.1387620  0.12856023 -1.9906180  0.9626449  1.65232646
## 5 -2.457080  2.1633499  0.60441124  2.5449364 -1.4978440  2.60542655
## 6  2.003033 -0.5379940 -2.19647264 -1.1954677 -0.5974466 -0.04703835
##        stat59     stat60    stat61      stat62     stat63     stat64
## 1  1.24239659 -2.5798278  1.327928  1.68560362  0.6284891 -1.6798652
## 2  0.06379301  0.9465770  1.116928  0.03128772 -2.1944375  0.3382609
## 3  0.93223447  2.4597080  0.465251 -1.71033382 -0.5156728  1.8276784
## 4 -0.29840910  0.7273473 -2.313066 -1.47696018  2.5910559 -1.5127999
## 5 -1.17610002 -1.7948418 -2.669305  0.17813617  2.8956099  2.9411416
## 6 -1.01793981  0.2817057  2.228023 -0.86494124 -0.9747949 -0.1569053
##       stat65     stat66     stat67     stat68     stat69    stat70
## 1 -2.9490898 -0.3325469  1.5745990 -2.2978280  1.5451891 -1.345990
## 2 -1.1174885 -1.5728682 -2.9229002  0.2658547 -1.9616533  2.506130
## 3 -0.2231264 -0.4503301  0.7932286 -1.2453773 -2.2309763  2.309761
## 4 -0.3522418 -2.0720532  0.9442933  2.9212906  0.5100371 -2.441108
## 5 -2.1648991  1.2002029  2.8266985  0.7461294  1.6772674 -1.280000
## 6 -2.2295458  1.1446493  0.2024925 -0.2983998 -2.8203752  1.224030
##       stat71     stat72     stat73     stat74      stat75     stat76
## 1  1.0260956  2.1071210  2.6625669 -2.8924677 -0.02132523 -2.5058765
## 2  0.3525076  1.6922342 -1.2167022 -1.7271879  2.21176434  1.9329683
## 3 -2.1799035 -2.2645276  0.1415582  0.9887453  1.95592320  0.2951785
## 4 -2.4051409  2.0876484 -0.8632146  0.4011389 -1.16986716 -1.2391174
## 5  1.3538754 -0.8089395 -0.5122626 -2.1696892  1.07344925  2.6696169
## 6 -2.8073371 -1.4450488  0.5481212 -1.4381690  0.80917043 -0.1365944
##       stat77      stat78      stat79     stat80     stat81     stat82
## 1 -2.5631845 -2.40331340  0.38416120 -1.2564875 -0.1550840 -1.1762617
## 2 -0.4462085  0.38400793  1.80483031 -0.8387642  0.7624431  0.9936900
## 3  1.6757870 -1.81900752  2.70904708 -0.3201959  2.5754235  1.6346260
## 4 -2.1012006 -2.24691081  1.78056848  1.0323739  1.0762523  2.1343851
## 5 -2.5736733 -1.99958372 -0.05388495 -2.5630073 -2.8783002 -0.5752426
## 6  1.6143972  0.03233746  2.90835762  1.4000487  2.9275615 -2.8503830
##       stat83     stat84     stat85     stat86    stat87     stat88
## 1  1.2840565 -2.6794965  1.3956039 -1.5290235  2.221152  2.3794982
## 2 -0.2380048  1.9314318 -1.6747955 -0.3663656  1.582659 -0.5222489
## 3 -0.9150769 -1.5520337  2.4186287  2.7273662  1.306642  0.1320062
## 4 -2.5824408 -2.7775943  0.5085060  0.4689015  2.053348  0.7957955
## 5 -1.0017741 -0.2009138  0.3770109  2.4335201 -1.118058  1.3953410
## 6  2.4891765  2.9931953 -1.4171852  0.3905659 -1.856119 -2.1690490
##       stat89     stat90     stat91      stat92     stat93     stat94
## 1 -0.9885110 -0.8873261 -2.7810929 -1.53325891  2.6002395  1.8890998
## 2  0.9982028 -1.2382015 -0.1574496  0.41086048 -0.5412626 -0.2421387
## 3  0.5956759  1.6871066  2.2452753  2.74279594 -1.5860478  2.9393122
## 4  2.0902634  2.1752586 -2.0677712 -2.37861037  1.1653302  0.1500632
## 5  2.9820614  0.8111660 -0.7842287  0.03766387 -1.1681970  2.1217251
## 6 -1.7428021  0.1579032  1.7456742 -0.36858466 -0.1304616 -1.4555819
##       stat95     stat96      stat97     stat98     stat99   stat100
## 1 -2.6056035 -0.5814857  2.57652426 -2.3297751  2.6324007  1.445827
## 2 -2.0271583 -0.9126074  2.49582648  0.9745382  1.1339203 -2.549544
## 3  0.3823181 -0.6324139  2.46221566  1.1151560  0.4624891  0.107072
## 4  2.6414623 -0.6630505  2.10394859  1.2627635  0.4861740  1.697012
## 5  1.4642254  2.6485956 -0.07699547  0.6219473 -1.8815142 -2.685463
## 6  1.8937331 -0.4690555  1.04671776 -0.5879866 -0.9766789  2.405940
##      stat101   stat102    stat103    stat104    stat105    stat106
## 1 -2.1158021  2.603936  1.7745128 -1.8903574 -1.8558655  1.0122044
## 2 -2.7998588 -2.267895  0.5336456 -0.2859477 -0.5196246 -0.9417582
## 3  0.7969509 -1.744906 -0.7960327  1.9767258 -0.2007264 -0.7872376
## 4  1.7071959 -1.540221  1.6770362  1.5395796 -0.4855365 -1.2894115
## 5 -1.4627420 -1.700983  2.4376490  0.2731541  1.5275587  1.3256483
## 6  2.6888530  1.090155  2.0769854  1.9615480  1.8689761  2.8975825
##     stat107    stat108   stat109    stat110    stat111    stat112
## 1  1.954508 -0.3376471  2.503084  0.3099165  2.7209847 -2.3911204
## 2 -2.515160  0.3998704 -1.077093  2.4228268 -0.7759693  0.2513882
## 3  1.888827  1.5819857 -2.066659 -2.0008364  0.6997684  2.6157095
## 4  1.076395 -1.8524148 -2.689204  1.0985872  1.2389493  2.1018629
## 5  2.828866 -1.8590252 -2.424163  1.4391942 -0.6173239 -1.5218846
## 6 -1.419639  0.7888914  1.996463  0.9813507  0.9034198  1.3810679
##     stat113    stat114    stat115     stat116   stat117    stat118
## 1 -1.616161  1.0878664  0.9860094 -0.06288462 -1.013501 -1.2212842
## 2 -1.554771  1.8683100  0.4880588 -0.63865489 -1.610217 -1.7713343
## 3 -2.679801 -2.9486952  1.7753417  0.90311784 -1.318836 -0.1429040
## 4  2.459229 -0.5584171  0.4419581 -0.09586351  0.595442  0.2883342
## 5 -2.102200  1.6300170 -2.3498287  1.36771894 -1.912202 -0.2563821
## 6 -1.835037  0.6577786 -2.9928374  2.13540316 -1.437299 -0.9570006
##      stat119    stat120    stat121    stat122    stat123     stat124
## 1  2.9222729  1.9151262  1.6686068  2.0061224  1.5723072  0.78819227
## 2  2.1828208  0.8283178 -2.4458632  1.7133740  1.1393738 -0.07182054
## 3  0.9721319  1.2723130  2.8002086  2.7670381 -2.2252586  2.17499113
## 4 -1.9327896 -2.5369370  1.7835028  1.0262097 -1.8790983 -0.43639564
## 5  1.3230809 -2.8145256 -0.9547533 -2.0435417 -0.2758764 -1.85668027
## 6  0.1720700 -1.4568460  1.4115051 -0.9878145  2.3895061 -2.33730745
##     stat125    stat126    stat127   stat128    stat129     stat130
## 1  1.588372  1.1620011 -0.2474264  1.650328  2.5147598  0.37283245
## 2 -1.173771  0.8162020  0.3510315 -1.263667  1.7245284 -0.72852904
## 3 -1.503497 -0.5656394  2.8040256 -2.139287 -1.7221642  2.17899609
## 4  1.040967 -2.9039600  0.3103742  1.462339 -1.2940350 -2.95015502
## 5 -2.866184  1.6885070 -2.2525666 -2.628631  1.8581577  2.80127025
## 6 -1.355111  1.5017927  0.4295921 -0.580415  0.9851009 -0.03773117
##       stat131    stat132    stat133    stat134    stat135      stat136
## 1 -0.09028241  0.5194538  2.8478346  2.6664724 -2.0206311  1.398415090
## 2 -0.53045595  1.4134049  2.9180586  0.3299096  1.4784122 -1.278896090
## 3  1.35843194  0.2279946  0.3532595  0.6138676 -0.3443284  0.057763811
## 4 -1.92450273  1.2698178 -1.5299660 -2.6083462  1.1665530 -0.187791914
## 5  1.49036849  2.6337729 -2.3206244  0.4978287 -1.7397571  0.001200184
## 6 -0.64642709 -1.9256228  1.7032650 -0.9152725 -0.3188055  2.155395980
##      stat137    stat138    stat139    stat140    stat141    stat142
## 1 -1.2794871  0.4064890 -0.4539998  2.6660173 -1.8375313  0.4711883
## 2 -2.7709017 -1.6303773 -1.9025910  0.2572918  0.6612002  1.4764348
## 3 -1.1930757 -0.1051243 -0.5108380 -1.0879666  2.4969513 -0.9477230
## 4 -1.2318919  2.2348571  0.1788580 -1.5851788 -1.2384283 -2.1859181
## 5  1.8685058  2.7229517 -2.9077182  2.6606939 -1.5963592 -2.2213492
## 6 -0.4807318 -1.2117369 -0.9358531 -2.5100758 -2.3803916 -0.7096854
##     stat143    stat144    stat145    stat146     stat147    stat148
## 1 1.9466263  2.2689433 -0.3597288 -0.6551386  1.65438592  0.6404466
## 2 1.3156421  2.4459090 -0.3790028  1.4858465 -0.07784461  1.0096149
## 3 0.1959563  2.3062942  1.8459278  2.6848175 -2.70935774 -1.2093409
## 4 1.7633296 -2.8171508  2.0902622 -2.6625464 -1.12600601 -2.1926479
## 5 0.3885758  1.8160636  2.8257299 -1.4526173  1.60679603  2.3807991
## 6 0.7623450  0.2692145 -2.4307463 -2.1244523 -2.67803812 -1.5273387
##      stat149   stat150    stat151    stat152    stat153    stat154
## 1  0.1583575 0.4755351  0.3213410  2.0241520  1.5720103 -0.1825875
## 2 -0.4311406 2.9577663  0.6937252  0.1397280  0.3775735 -1.1012636
## 3 -0.8352824 2.5716205  1.7528236  0.4326277 -2.2334397 -2.6265771
## 4 -2.8069143 1.8813509  2.3358023  0.1015632  1.2117474 -1.3714278
## 5 -1.6166265 1.1112266 -1.1998471  2.9316769 -2.1676455 -0.3411089
## 6 -0.2265472 2.7264354 -1.6746094 -2.3376281 -1.7022788 -1.2352397
##     stat155     stat156    stat157    stat158     stat159   stat160
## 1 -1.139657  0.07061254  0.5893906 -1.9920996 -2.83714366  2.249398
## 2 -2.041093  0.74047768  2.5415072 -1.2697256 -1.64364433 -2.448922
## 3 -1.219507 -0.55198693  0.4046920  1.2098547 -0.90412390 -1.934093
## 4  2.992191  2.33222485  2.0622969 -0.6714653  2.76836085 -1.431120
## 5 -2.362356 -1.23906672  0.4746319 -0.7849202  0.69399995  2.052411
## 6 -1.604499  1.31051409 -0.5164744  0.6288667  0.07899523 -2.287402
##      stat161    stat162    stat163    stat164    stat165    stat166
## 1  1.7182635 -1.2323593  2.7350423  1.0707235  1.1621544  0.9493989
## 2 -0.6247674  2.6740098  2.8211024  1.5561292 -1.1027147  1.0519739
## 3 -0.6230453 -0.7993517 -2.8318374 -1.1148673  1.4261659  0.5294309
## 4  1.7644744  0.1696584  1.2653207  0.6621516  0.9470508  0.1985014
## 5 -1.2070210  0.7243784  0.9736322  2.7426259 -2.6862383  1.6840212
## 6  2.3705316 -2.1667893 -0.2516685 -0.8425958 -1.9099342 -2.8607297
##      stat167    stat168    stat169     stat170     stat171    stat172
## 1  0.1146510  2.3872008  1.1180918 -0.95370555 -2.25076509  0.2348182
## 2  1.0760417 -2.0449336  0.9715676 -0.40173489 -0.11953555 -2.3107369
## 3  1.1735898  1.3860190 -2.2894719  0.06350347  0.29191551 -1.6079744
## 4  2.5511832  0.5446648  1.2694012 -0.84571201  0.79789722  0.2623538
## 5  2.2900002  2.6289782 -0.2783571  1.39032829 -0.55532032  1.0499046
## 6 -0.7513983  2.9617066 -2.2119520 -1.71958113 -0.01452018 -0.2751517
##       stat173   stat174     stat175    stat176     stat177    stat178
## 1  1.79366076 -1.920206 -0.38841942  0.8530301  1.64532077 -1.1354179
## 2 -0.07484659  1.337846  2.20911694  0.9616837 -2.80810070 -2.1136749
## 3 -1.05521810 -1.483741  0.06148359  2.3066039 -0.34688616  1.1840581
## 4  0.31460321  1.195741  2.97633862  1.1685091 -0.06346265  1.4205489
## 5 -1.39428365  2.458523  0.64836472 -1.0396386 -0.57828104 -0.5006818
## 6  2.31844401  1.239864 -2.06490874  0.7696204 -1.77586019  2.0855925
##      stat179    stat180     stat181    stat182    stat183    stat184
## 1  2.0018647  0.1476815 -1.27279520  1.9181504 -0.5297624 -2.9718938
## 2 -2.1351449  2.9012582 -1.09914911 -2.5488517 -2.8377736  1.4073374
## 3 -1.7819908  2.9902627  0.81908613  0.2503852  0.3712984 -2.1714024
## 4 -0.1026974 -2.4763253 -2.52645421  1.3096315  2.1458161 -1.5228094
## 5 -2.2298794  2.4465680 -0.70346898 -1.6997617  2.9178164 -0.3615532
## 6 -1.1168108  1.5552123 -0.01361342  1.7338791 -1.1104763  0.1882416
##      stat185    stat186   stat187    stat188    stat189    stat190
## 1 -0.1043832 -1.5047463  2.700351 -2.4780862 -1.9078265  0.9978108
## 2 -2.0310574 -0.5380074 -1.963275 -1.2221278 -2.4290681 -1.9515115
## 3  2.6727278  1.2688179 -1.399018 -2.9612138  2.6456394  2.0073323
## 4 -2.7796295  2.0682354  2.243727  0.4296881  0.1931333  2.2710960
## 5 -0.6231265  2.5833981  2.229041  0.8139584  1.4544131  1.8886451
## 6  2.7204690 -2.4469144 -1.421998  1.7477882 -0.1481806  0.6011560
##      stat191    stat192    stat193    stat194   stat195    stat196
## 1 -0.6644351  2.6270833 -1.1094601 -2.4200392  2.870713 -0.6590932
## 2 -0.6483142  1.4519118 -0.1963493 -2.3025322  1.255608  2.1617947
## 3 -1.5457382 -0.2977442 -1.7045015  0.7962404 -1.696063 -1.4771117
## 4 -1.1780495 -2.9747574 -1.1471518 -1.2377013 -1.010672 -2.6055975
## 5  2.8813178 -1.8964081 -1.2653487 -1.7839754 -2.872581  2.3033464
## 6  0.4437973  0.6599325 -1.4029555 -2.3118258 -1.792232  1.3934380
##       stat197    stat198    stat199    stat200    stat201    stat202
## 1 -0.83056986  0.9550526 -1.7025776 -2.8263099 -0.7023998  0.2272806
## 2 -1.42178249 -1.2471864  2.5723093 -0.0233496 -1.8975239  1.9472262
## 3 -0.19233958 -0.5161456  0.0279946 -1.2333704 -2.9672263 -2.8666208
## 4 -1.23145902  1.4728470 -0.4562025 -2.2983441 -1.5101184  0.2530525
## 5  1.85018563 -1.8269292 -0.6337969 -2.1473246  0.9909850  1.0950903
## 6 -0.09311061  0.5144456 -2.8178268 -2.7555969 -2.3546004 -1.0558939
##        stat203      stat204    stat205    stat206    stat207    stat208
## 1  1.166631220  0.007453276  2.9961641  1.5327307 -2.2293356 -0.9946009
## 2 -0.235396504  2.132749800  0.3707606  1.5604026 -1.0089217  2.1474257
## 3  0.003180946  2.229793310  2.7354040  0.8992231  2.9694967  2.3081024
## 4 -0.474482715 -1.584772230 -2.3224132 -0.9409741 -2.3179255  0.8032548
## 5  2.349412920 -1.276320220 -2.0203719 -1.1733509  1.0371852 -2.5086207
## 6  0.727436960 -0.960191786 -0.8964998 -1.6406623 -0.2330488  1.7993879
##      stat209    stat210   stat211    stat212    stat213    stat214
## 1 -2.2182105 -1.4099774 -1.656754  2.6602585 -2.9270992  1.1240714
## 2 -2.8932488 -1.1641679 -2.605423 -1.5650513  2.9523673  2.0266318
## 3 -1.8279589  0.0472350 -2.026734  2.5054367  0.9903042  0.3274105
## 4 -1.0878067  0.1171303  2.645891 -1.6775225  1.3452160  1.4694063
## 5 -0.8158175  0.4060950  0.912256  0.2925677  2.1610141  0.5679936
## 6 -2.2664354 -0.2061083 -1.435174  2.6645632  0.4216259 -0.6419122
##      stat215    stat216    stat217
## 1 -2.7510750 -0.5501796  1.2638469
## 2  2.8934650 -2.4099574 -1.2411407
## 3 -1.0947676  1.2852937  1.5411530
## 4  0.6343777  0.1345372  2.9102673
## 5  0.9908702  1.7909757 -2.0902610
## 6 -2.8113887 -1.0624912  0.2765074
# Use the high-precision feature set for the remainder of the analysis
features <- features.highprec
# str(features)

Checking correlations to evaluate removal of redundant features

# Pairwise correlations among the numeric feature columns (rounded to 2 dp)
corr.matrix <- round(cor(features[sapply(features, is.numeric)]), 2)

# Keep only variables involved in at least one strong pairwise correlation
threshold <- 0.6
corr.matrix.tmp <- corr.matrix
diag(corr.matrix.tmp) <- 0  # self-correlations are always 1; mask them out
high.corr <- apply(abs(corr.matrix.tmp) >= threshold, 1, any)
high.corr.matrix <- corr.matrix.tmp[high.corr, high.corr]

DT::datatable(corr.matrix)
DT::datatable(high.corr.matrix)

Feature Names

# Feature names are all feature columns except the job identifier
drops <- c("JobName")
feature.names <- colnames(features)
feature.names <- feature.names[!(feature.names %in% drops)]
# str(feature.names)

Read and Clean Labels

# Load labels and keep only the merge key plus the selected response column.
# output.var comes from params (e.g. "y3"); per the summary below, the label
# column contains NA's that are dropped later in "Remove NA Cases".
labels = read.csv("../../Data/labels.csv")
#str(labels)
labels = labels[,c("JobName", output.var)]
summary(labels)
##       JobName           y3        
##  Job_00001:   1   Min.   : 95.91  
##  Job_00002:   1   1st Qu.:118.21  
##  Job_00003:   1   Median :123.99  
##  Job_00004:   1   Mean   :125.36  
##  Job_00005:   1   3rd Qu.:131.06  
##  Job_00006:   1   Max.   :193.73  
##  (Other)  :9994   NA's   :2497

Merge Datasets

# Join features to labels on the job identifier, then drop the key column
data <- merge(features, labels, by = "JobName")
drops <- c("JobName")
data <- data[, !(colnames(data) %in% drops)]
# str(data)

Transformations

# Apply the label transformation chosen via params$trans.
# NOTE(review): label.names and alt.scale.label.name are globals defined
# outside this chunk - presumably e.g. "log.y3" and "y3"; confirm upstream.
if (transform.abs == TRUE){
  # Convert from dB-style scale back to absolute magnitude
  data[,label.names] = 10^(data[,label.names]/20)
  #data = filter(data, y3 < 1E7)
}
if (log.pred == TRUE){
  # Replace the raw label with its log10, then drop the raw column
  data[label.names] = log(data[alt.scale.label.name],10)
  
  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}

t = NULL # initialize to NULL for other cases (needed later for inverse transform)
# NOTE(review): `t` masks base::t() from here on - rename if transposes are needed
if (norm.pred){
  # Fit a normalizing transform to the label and store it for inverse use
  t = bestNormalize::bestNormalize(data[[alt.scale.label.name]])
  data[label.names] = predict(t)
  
  drops = c(alt.scale.label.name)
  data = data[!(names(data) %in% drops)]
}
#str(data)
#str(data)

Remove NA Cases

data = data[complete.cases(data),]

Exploratory Data Analysis

Check correlation of Label with Features

# EDA: correlation of every feature with the (transformed) label, to 4 dp
if (eda == TRUE){
  corr.to.label =round(cor(dplyr::select(data,-one_of(label.names)),dplyr::select_at(data,label.names)),4)
  DT::datatable(corr.to.label)
}

Multicollinearity - VIF

# EDA: variance inflation factors to gauge multicollinearity among features
if (eda == TRUE){
  # NOTE(review): select_at and %>% are used unqualified - assumes dplyr/magrittr attached
  vifDF = usdm::vif(select_at(data,feature.names)) %>% arrange(desc(VIF))
  head(vifDF,10)
}

Scatterplots

# Diagonal panel for pairs(): draws a cyan histogram of x inside the panel.
# (Standard helper adapted from the ?pairs example.)
panel.hist <- function(x, ...)
{
    # Remember the current user coordinates and restore them on exit
    old.usr <- par("usr"); on.exit(par(old.usr))
    # Re-scale the vertical range so the bars fit within the panel
    par(usr = c(old.usr[1:2], 0, 1.5))
    hh <- hist(x, plot = FALSE)
    edges <- hh$breaks
    heights <- hh$counts / max(hh$counts)  # normalise tallest bar to 1
    rect(edges[-length(edges)], 0, edges[-1], heights, col = "cyan", ...)
}
# EDA: distribution of the (possibly transformed) label
if (eda == TRUE){
  # NOTE(review): histogram() is unqualified - presumably lattice::histogram;
  # confirm the package is attached earlier in the document
  histogram(data[ ,label.names])
  #hist(data[complete.cases(data),alt.scale.label.name])
}
# https://stackoverflow.com/questions/24648729/plot-one-numeric-variable-against-n-numeric-variables-in-n-plots
# Draw one scatterplot of yvar against each x variable.
#
# data  : data.frame holding all variables
# xvars : character vector of x column names; defaults to every column
#         except yvar
# yvar  : name of the y column
#
# Returns (invisibly) the character vector of x variables that were plotted.
ind.pairs.plot <- function(data, xvars=NULL, yvar)
{
    df <- data
    if (is.null(xvars)) {
        # logical indexing (not setdiff) so duplicate column names survive
        xvars = names(df)[names(df) != yvar]
    }

    # seq_along() rather than 1:length() so an empty xvars draws nothing
    # instead of erroring on the degenerate 1:0 sequence
    for (i in seq_along(xvars)) {
        plot(df[, xvars[i]], df[, yvar], xlab = xvars[i], ylab = yvar)
    }
    invisible(xvars)
}

# EDA: scatterplot of the label against every feature
if (eda == TRUE){
  ind.pairs.plot(data, feature.names, label.names)
}

# 
# pl <- ggplot(data, aes(x=x18, y = y3))
# pl2 <- pl + geom_point(aes(alpha = 0.1)) # default color gradient based on 'hp'
# print(pl2)

Feature Engineering

# Manual feature engineering.
# NOTE(review): the eda == FALSE guard means this transform only runs in
# non-EDA mode - confirm that is intended, since it changes `data` columns.
if(eda ==FALSE){
  # x18 may need transformations
  plot(data[,'x18'], data[,label.names], main = "Original Scatter Plot vs. x18", ylab = label.names, xlab = 'x18')
  plot(sqrt(data[,'x18']), data[,label.names], main = "Original Scatter Plot vs. sqrt(x18)", ylab = label.names, xlab = 'sqrt(x18)')
  
  # transforming x18: replace it with its square root
  data$sqrt.x18 = sqrt(data$x18)
  data = dplyr::select(data,-one_of('x18'))
  
  # what about x7, x9?
  # x11 looks like data is at discrete points after a while. Will this be a problem?
}

Modeling

Train Test Split

# NOTE(review): no set.seed() before sampling - the shuffle/split is not reproducible
data = data[sample(nrow(data)),] # randomly shuffle data
# 80/20 split on the label; sample.split is unqualified - presumably caTools
split = sample.split(data[,label.names], SplitRatio = 0.8)

data.train = subset(data, split == TRUE)
data.test = subset(data, split == FALSE)

Common Functions

# Standard regression diagnostics for a fitted linear model.
#
# model : fitted lm-style model supporting predict/rstandard/rstudent/
#         cooks.distance/model.matrix
# train : the data the model was fit on (used for predictions and the
#         Cook's distance 4/n cutoff)
#
# Draws: the default plot(model) panels, studentized and standardized
# residual plots, a histogram of studentized residuals with a standard-normal
# overlay, a leverage plot, and a Cook's distance plot; prints the counts of
# influential points and returns the vector of Cook's distances.
#
# NOTE: the dotted name reads like an S3 `plot` method for class
# "diagnostics"; it is only ever called directly, so the name is kept.
plot.diagnostics <-  function(model, train) {
  plot(model)
  
  r.standard = rstandard(model)
  r.student = rstudent(model)

  # Studentized residuals vs fitted values
  plot(predict(model,train),r.student,
      ylab="Student Residuals", xlab="Predicted Values", 
      main="Student Residual Plot") 
  abline(0, 0)
  
  # Standardized residuals vs fitted values, with +/-2 reference bands
  plot(predict(model, train),r.standard,
      ylab="Standard Residuals", xlab="Predicted Values", 
      main="Standard Residual Plot") 
  abline(0, 0)
  abline(2, 0)
  abline(-2, 0)
  
  # Histogram of studentized residuals
  hist(r.student, freq=FALSE, main="Distribution of Studentized Residuals", 
  xlab="Studentized Residuals", ylab="Density", ylim=c(0,0.5))

  # Create range of x-values for normal curve
  xfit <- seq(min(r.student)-1, max(r.student)+1, length=40)

  # Generate values from the standard normal at the specified values
  yfit <- (dnorm(xfit))

  # Add the normal curve
  lines(xfit, yfit, ylim=c(0,0.5))
  
  
  # http://www.stat.columbia.edu/~martin/W2024/R7.pdf
  # Influence measures (computed but not printed - summary is too verbose)
  inf.meas = influence.measures(model)
  # print (summary(inf.meas)) # too much data
  
  # Leverage plot (hat values of the design matrix)
  lev = hat(model.matrix(model))
  plot(lev, ylab = 'Leverage - check')
  
  # Cook's Distance with the conventional 4/n and 1.0 cutoff lines
  cd = cooks.distance(model)
  plot(cd,ylab="Cooks distances")
  abline(4/nrow(train),0)
  abline(1,0)
  
  print (paste("Number of data points that have Cook's D > 4/n: ", length(cd[cd > 4/nrow(train)]), sep = "")) 
  print (paste("Number of data points that have Cook's D > 1: ", length(cd[cd > 1]), sep = "")) 
  return(cd)
}

# Train a subset-selection / regularised linear model through caret and emit
# diagnostics (metrics plot, residual plot, residual histogram).
#
# formula       : model formula (the full formula is fine; the method selects terms)
# data          : training data.frame
# method        : caret method - 'leapForward' / 'leapBackward' / 'leapSeq',
#                 'glmnet' (with subopt == 'LASSO'), or 'lars'
# subopt        : sub-option qualifying `method`; only 'LASSO' is recognised
# feature.names : feature names; sizes the nvmax grid for the leap methods
# train.control : optional caret::trainControl; defaults to 10-fold CV grid search
# tune.grid     : optional tuning grid; a method-specific default is built if NULL
# pre.proc      : optional preProcess spec; forced to center/scale for 'lars'
#
# Returns a list(model, id, residPlot, ...) whose members depend on the method
# branch; returns NULL invisibly for an unrecognised method.
train.caret.glmselect = function(formula, data, method
                                 ,subopt = NULL, feature.names
                                 , train.control = NULL, tune.grid = NULL, pre.proc = NULL){
  
  # Default resampling: 10-fold cross-validation with grid search
  if(is.null(train.control)){
    train.control <- trainControl(method = "cv"
                              ,number = 10
                              ,search = "grid"
                              ,verboseIter = TRUE
                              ,allowParallel = TRUE
                              )
  }
  
  # Build a method-specific default tuning grid when the caller gave none
  if(is.null(tune.grid)){
    if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
      tune.grid = data.frame(nvmax = 1:length(feature.names))
    }
    # NOTE(review): if method == 'glmnet' is reached with subopt = NULL, the
    # && against NULL yields a zero-length condition and this if() errors -
    # callers must pass subopt = 'LASSO' for glmnet
    if (method == 'glmnet' && subopt == 'LASSO'){
      # Will only show 1 Lambda value during training, but that is OK
      # https://stackoverflow.com/questions/47526544/why-need-to-tune-lambda-with-carettrain-method-glmnet-and-cv-glmnet
      # Another option for LASSO is this: https://github.com/topepo/caret/blob/master/RegressionTests/Code/lasso.R
      lambda = 10^seq(-2,0, length =100)
      alpha = c(1)  # alpha = 1 selects the pure LASSO penalty
      tune.grid = expand.grid(alpha = alpha,lambda = lambda)
    }
    if (method == 'lars'){
      # https://github.com/topepo/caret/blob/master/RegressionTests/Code/lars.R
      fraction = seq(0, 1, length = 100)
      tune.grid = expand.grid(fraction = fraction)
      pre.proc = c("center", "scale") 
    }
  }
  
  # http://sshaikh.org/2015/05/06/parallelize-machine-learning-in-r-with-multi-core-cpus/
  cl <- makeCluster(detectCores()*0.75) # use 75% of cores only, leave rest for other tasks
  registerDoParallel(cl)

  set.seed(1) 
  # note that the seed has to actually be set just before train() is called;
  # setting it earlier (outside this function) did not ensure reproducibility
  model.caret <- caret::train(formula
                              , data = data
                              , method = method
                              , tuneGrid = tune.grid
                              , trControl = train.control
                              , preProc = pre.proc
                              )
  
  stopCluster(cl)
  registerDoSEQ() # register sequential engine in case you are not using this function anymore
  
  # --- leap* branch: report results and plot against subset size (nvmax) ---
  if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    print(model.caret$results) # all model results
    print(model.caret$bestTune) # best model
  
    model = model.caret$finalModel

    # Metrics Plot 
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-nvmax) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=nvmax,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot
    # leap function does not support studentized residuals
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)
   
    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)
    
    # Provides the coefficients of the best model
    id = rownames(model.caret$bestTune)
    message("Coefficients of final model:")
    print (coef(model, id = id))
    
    return(list(model = model,id = id, residPlot = residPlot, residHistogram=residHistogram))
  }
  # --- glmnet/LASSO branch: plot against the lambda penalty ---
  if (method == 'glmnet' && subopt == 'LASSO'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    print(model.caret$results)
    
    # Metrics Plot 
    dataPlot = model.caret$results %>%
      gather(key='metric',value='value',-lambda) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=lambda,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot 
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') +
      theme_light()
    plot(residHistogram)

    id = NULL # not really needed but added for consistency
    # NOTE(review): this branch returns metricsPlot where the others return
    # residHistogram - confirm callers do not rely on that member
    return(list(model = model.caret,id = id, residPlot = residPlot, metricsPlot=metricsPlot ))
  }
  # --- lars branch: plot against the L1 fraction ---
  if (method == 'lars'){
    print(model.caret)
    print(plot(model.caret))
    print(model.caret$bestTune)
    
    # Metrics Plot
    dataPlot = model.caret$results %>%
        gather(key='metric',value='value',-fraction) %>%
      dplyr::filter(metric %in% c('MAE','RMSE','Rsquared'))
    metricsPlot = ggplot(data=dataPlot,aes(x=fraction,y=value) ) +
      geom_line(color='lightblue4') +
      geom_point(color='blue',alpha=0.7,size=.9) +
      facet_wrap(~metric,ncol=4,scales='free_y')+
      theme_light()
    plot(metricsPlot)
    
    # Residuals Plot
    dataPlot=data.frame(pred=predict(model.caret,data),res=resid(model.caret))
    residPlot = ggplot(dataPlot,aes(x=pred,y=res)) +
      geom_point(color='light blue',alpha=0.7) +
      geom_smooth()+
      theme_light()
    plot(residPlot)

    residHistogram = ggplot(dataPlot,aes(x=res)) +
      geom_histogram(aes(y=..density..),fill='light blue',alpha=1) +
      geom_density(color='lightblue4') + 
      theme_light()
    plot(residHistogram)

    id = NULL # not really needed but added for consistency
    return(list(model = model.caret,id = id, residPlot = residPlot, residHistogram=residHistogram))
  }
}

# Predict from a leaps::regsubsets fit at a given subset size `id`.
# Adapted from https://stackoverflow.com/questions/48265743 - the formula is
# passed in explicitly because object$call[[2]] only yielded the symbol
# "formula", not its value.
predict.regsubsets <- function(object, newdata, id, formula, ...) {
    # Design matrix: adds the intercept and expands any interaction terms
    design <- model.matrix(formula, newdata)
    # Coefficients of the size-`id` model; names pick the active columns
    beta <- coef(object, id = id)
    design[, names(beta)] %*% beta
}
  
# Evaluate a fitted model on a held-out test set: prints a summary of the
# predictions and the test MSE, then plots predicted vs actual with +/-good
# (green) and +/-ok (blue) relative-error reference lines.
#
# model          : fitted model object
# test           : test data.frame
# level          : confidence level for predict() when method is NULL
# draw.limits    : kept for interface compatibility (currently unused)
# good, ok       : relative error tolerances for the reference lines
# method/subopt  : NULL for a plain lm-style model; otherwise the caret method
#                  ('leapForward'/'leapBackward'/'leapSeq', 'glmnet' with
#                  subopt 'LASSO', or 'lars')
# id             : subset size for predict.regsubsets (leap methods only)
# formula        : full formula (leap methods select their variables from it)
# feature.names  : feature columns used to build the glmnet design matrix
# label.names    : label column name
# transformation : bestNormalize transformer for inverse-transforming predictions
#
# NOTE(review): relies on the globals log.pred / norm.pred (set at the top of
# the document) to decide how to back-transform predictions for plotting.
test.model = function(model, test, level=0.95
                      ,draw.limits = FALSE, good = 0.1, ok = 0.15
                      ,method = NULL, subopt = NULL
                      ,id = NULL, formula, feature.names, label.names
                      ,transformation = NULL){
  ## if using caret for glm select equivalent functionality, 
  ## need to pass formula (full is ok as it will select subset of variables from there)
  
  # Dispatch on method. BUGFIX: the original used a chain of independent if()s
  # which errored when method was NULL (NULL == "x" has length zero) and when
  # method == 'glmnet' with subopt = NULL; use a guarded else-if chain instead.
  if (is.null(method)){
    pred = predict(model, newdata=test, interval="confidence", level = level) 
  } else if (method == 'leapForward' | method == 'leapBackward' | method == 'leapSeq'){
    pred = predict.regsubsets(model, newdata = test, id = id, formula = formula)
  } else if (method == 'glmnet' && !is.null(subopt) && subopt == 'LASSO'){
    xtest = as.matrix(test[,feature.names]) 
    pred=as.data.frame(predict(model, xtest))
  } else if (method == 'lars'){
    pred=as.data.frame(predict(model, newdata = test))
  } else {
    # Fail loudly instead of hitting "object 'pred' not found" below
    stop("test.model: unsupported method '", method, "'")
  }
    
  # Summary of predicted values
  print ("Summary of predicted values: ")
  print(summary(pred[,1]))

  test.mse = mean((test[,label.names]-pred[,1])^2)
  print (paste(method, subopt, "Test MSE:", test.mse, sep=" "))
  
  if(log.pred == TRUE || norm.pred == TRUE){
    # plot transformed-scale comparison first
    plot(test[,label.names],pred[,1],xlab = "Actual (Transformed)", ylab = "Predicted (Transformed)")
  }
    
  # Back-transform actual/predicted to the original scale for the final plot
  if (log.pred == FALSE && norm.pred == FALSE){
    x = test[,label.names]
    y = pred[,1]
  }
  if (log.pred == TRUE){
    x = 10^test[,label.names]
    y = 10^pred[,1]  
  }
  if (norm.pred == TRUE){
    x = predict(transformation, test[,label.names], inverse = TRUE)
    y = predict(transformation, pred[,1], inverse = TRUE)
  }

  plot(x, y, xlab = "Actual", ylab = "Predicted")
  abline(0,(1+good),col='green', lwd = 3)
  abline(0,(1-good),col='green', lwd = 3)
  abline(0,(1+ok),col='blue', lwd = 3)
  abline(0,(1-ok),col='blue', lwd = 3)
  
}

Setup Formulae

# Build "<label> ~ <all other columns>" formula from the training-set names
n <- names(data.train)
 formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~", paste(n[!n %in% label.names], collapse = " + "))) 

# Commented alternates below restrict to hand-picked terms with interactions
# ind.interact = c("x4","x7","x8", "x9", "x10", "x11", "x14", "x16", "x17", "x21", "sqrt.x18")
# ind.nointeract = c("stat13", "stat14", "stat24", "stat60", "stat98", "stat110", "stat144", "stat149")
# 
# interact = paste(ind.interact, collapse = " + ")
# nointeract = paste(ind.nointeract, collapse = " + ")
# 
# # ^2 is 2 way interaction, ^3 is 3 way interaction
# formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + "), "~ (", interact, " )^2 ", " + ", nointeract ))
# 
# # # * is all way interaction
# # formula <- as.formula(paste(paste(n[n %in% label.names], collapse = " + "), "~ (", interact, " ) ", " + ", nointeract ))

# Intercept-only (grand mean) formula
grand.mean.formula = as.formula(paste(paste(n[n %in% label.names], collapse = " + ")," ~ 1"))

print(formula)
## log.y3 ~ x1 + x2 + x3 + x4 + x5 + x6 + x7 + x8 + x9 + x10 + x11 + 
##     x12 + x13 + x14 + x15 + x16 + x17 + x19 + x20 + x21 + x22 + 
##     x23 + stat1 + stat2 + stat3 + stat4 + stat5 + stat6 + stat7 + 
##     stat8 + stat9 + stat10 + stat11 + stat12 + stat13 + stat14 + 
##     stat15 + stat16 + stat17 + stat18 + stat19 + stat20 + stat21 + 
##     stat22 + stat23 + stat24 + stat25 + stat26 + stat27 + stat28 + 
##     stat29 + stat30 + stat31 + stat32 + stat33 + stat34 + stat35 + 
##     stat36 + stat37 + stat38 + stat39 + stat40 + stat41 + stat42 + 
##     stat43 + stat44 + stat45 + stat46 + stat47 + stat48 + stat49 + 
##     stat50 + stat51 + stat52 + stat53 + stat54 + stat55 + stat56 + 
##     stat57 + stat58 + stat59 + stat60 + stat61 + stat62 + stat63 + 
##     stat64 + stat65 + stat66 + stat67 + stat68 + stat69 + stat70 + 
##     stat71 + stat72 + stat73 + stat74 + stat75 + stat76 + stat77 + 
##     stat78 + stat79 + stat80 + stat81 + stat82 + stat83 + stat84 + 
##     stat85 + stat86 + stat87 + stat88 + stat89 + stat90 + stat91 + 
##     stat92 + stat93 + stat94 + stat95 + stat96 + stat97 + stat98 + 
##     stat99 + stat100 + stat101 + stat102 + stat103 + stat104 + 
##     stat105 + stat106 + stat107 + stat108 + stat109 + stat110 + 
##     stat111 + stat112 + stat113 + stat114 + stat115 + stat116 + 
##     stat117 + stat118 + stat119 + stat120 + stat121 + stat122 + 
##     stat123 + stat124 + stat125 + stat126 + stat127 + stat128 + 
##     stat129 + stat130 + stat131 + stat132 + stat133 + stat134 + 
##     stat135 + stat136 + stat137 + stat138 + stat139 + stat140 + 
##     stat141 + stat142 + stat143 + stat144 + stat145 + stat146 + 
##     stat147 + stat148 + stat149 + stat150 + stat151 + stat152 + 
##     stat153 + stat154 + stat155 + stat156 + stat157 + stat158 + 
##     stat159 + stat160 + stat161 + stat162 + stat163 + stat164 + 
##     stat165 + stat166 + stat167 + stat168 + stat169 + stat170 + 
##     stat171 + stat172 + stat173 + stat174 + stat175 + stat176 + 
##     stat177 + stat178 + stat179 + stat180 + stat181 + stat182 + 
##     stat183 + stat184 + stat185 + stat186 + stat187 + stat188 + 
##     stat189 + stat190 + stat191 + stat192 + stat193 + stat194 + 
##     stat195 + stat196 + stat197 + stat198 + stat199 + stat200 + 
##     stat201 + stat202 + stat203 + stat204 + stat205 + stat206 + 
##     stat207 + stat208 + stat209 + stat210 + stat211 + stat212 + 
##     stat213 + stat214 + stat215 + stat216 + stat217 + sqrt.x18
# Intercept-only formula used as the lower bound for stepwise searches below.
print(grand.mean.formula)
## log.y3 ~ 1
# Update feature.names because we may have transformed some features
# (e.g. sqrt.x18 replaced x18 in the model formula above).
# NOTE(review): n is assumed to hold all column names of the training data,
# and label.names the response column(s) — confirm upstream.
feature.names = n[!n %in% label.names]

Full Model

# Fit the full OLS model: every candidate predictor in `formula` against
# the (transformed) target, on the unfiltered training set.
model.full <- lm(formula = formula, data = data.train)
summary(model.full)
## 
## Call:
## lm(formula = formula, data = data.train)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.080692 -0.020501 -0.004585  0.016473  0.186465 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  1.969e+00  9.194e-03 214.199  < 2e-16 ***
## x1          -3.714e-04  6.308e-04  -0.589 0.556016    
## x2           2.029e-04  4.033e-04   0.503 0.614904    
## x3           6.598e-05  1.104e-04   0.598 0.550025    
## x4          -4.635e-05  8.694e-06  -5.331 1.01e-07 ***
## x5           4.189e-04  2.854e-04   1.468 0.142244    
## x6          -2.039e-04  5.787e-04  -0.352 0.724562    
## x7           1.135e-02  6.123e-04  18.539  < 2e-16 ***
## x8           4.744e-04  1.435e-04   3.306 0.000952 ***
## x9           3.509e-03  3.192e-04  10.994  < 2e-16 ***
## x10          1.151e-03  2.981e-04   3.861 0.000114 ***
## x11          1.923e+05  7.145e+04   2.691 0.007134 ** 
## x12         -2.364e-04  1.822e-04  -1.298 0.194401    
## x13          8.768e-05  7.255e-05   1.208 0.226915    
## x14         -5.978e-04  3.117e-04  -1.918 0.055222 .  
## x15          2.038e-04  2.974e-04   0.685 0.493261    
## x16          7.069e-04  2.063e-04   3.426 0.000617 ***
## x17          1.501e-03  3.129e-04   4.797 1.65e-06 ***
## x19          2.802e-04  1.588e-04   1.764 0.077810 .  
## x20         -8.767e-04  1.107e-03  -0.792 0.428400    
## x21          1.448e-04  4.069e-05   3.558 0.000377 ***
## x22         -6.905e-04  3.335e-04  -2.070 0.038458 *  
## x23         -2.608e-04  3.162e-04  -0.825 0.409534    
## stat1       -4.274e-05  2.398e-04  -0.178 0.858540    
## stat2        9.978e-06  2.380e-04   0.042 0.966553    
## stat3        3.438e-04  2.405e-04   1.430 0.152833    
## stat4       -4.653e-04  2.409e-04  -1.931 0.053472 .  
## stat5       -4.413e-05  2.403e-04  -0.184 0.854277    
## stat6       -3.359e-04  2.391e-04  -1.404 0.160225    
## stat7       -3.452e-05  2.400e-04  -0.144 0.885629    
## stat8       -1.619e-04  2.396e-04  -0.676 0.499300    
## stat9        1.438e-04  2.394e-04   0.601 0.548151    
## stat10      -2.989e-04  2.392e-04  -1.250 0.211444    
## stat11      -1.660e-05  2.414e-04  -0.069 0.945179    
## stat12       1.843e-04  2.382e-04   0.774 0.439251    
## stat13      -4.257e-04  2.380e-04  -1.788 0.073766 .  
## stat14      -7.296e-04  2.377e-04  -3.070 0.002152 ** 
## stat15      -2.308e-04  2.378e-04  -0.971 0.331726    
## stat16      -6.308e-05  2.379e-04  -0.265 0.790903    
## stat17      -1.660e-04  2.362e-04  -0.703 0.482233    
## stat18      -2.890e-04  2.369e-04  -1.220 0.222689    
## stat19       2.705e-04  2.398e-04   1.128 0.259296    
## stat20      -3.758e-04  2.376e-04  -1.581 0.113844    
## stat21       3.509e-05  2.398e-04   0.146 0.883665    
## stat22      -4.594e-04  2.406e-04  -1.909 0.056271 .  
## stat23       7.172e-04  2.382e-04   3.011 0.002612 ** 
## stat24      -3.358e-04  2.385e-04  -1.408 0.159248    
## stat25      -3.531e-04  2.378e-04  -1.485 0.137662    
## stat26      -1.834e-04  2.393e-04  -0.766 0.443639    
## stat27       2.227e-04  2.395e-04   0.930 0.352520    
## stat28       3.005e-05  2.393e-04   0.126 0.900059    
## stat29      -1.420e-05  2.413e-04  -0.059 0.953085    
## stat30       1.690e-04  2.412e-04   0.701 0.483525    
## stat31      -9.686e-05  2.414e-04  -0.401 0.688250    
## stat32       7.413e-05  2.398e-04   0.309 0.757283    
## stat33      -2.770e-04  2.390e-04  -1.159 0.246600    
## stat34       2.465e-04  2.386e-04   1.033 0.301578    
## stat35      -4.587e-04  2.393e-04  -1.917 0.055306 .  
## stat36      -3.615e-05  2.372e-04  -0.152 0.878856    
## stat37      -1.976e-04  2.418e-04  -0.817 0.414032    
## stat38       3.317e-04  2.398e-04   1.383 0.166685    
## stat39      -3.836e-04  2.377e-04  -1.614 0.106651    
## stat40      -3.543e-07  2.395e-04  -0.001 0.998819    
## stat41      -1.833e-04  2.382e-04  -0.770 0.441490    
## stat42      -5.272e-04  2.388e-04  -2.208 0.027283 *  
## stat43      -3.186e-04  2.414e-04  -1.320 0.186983    
## stat44       1.813e-04  2.396e-04   0.757 0.449135    
## stat45      -5.480e-04  2.384e-04  -2.299 0.021559 *  
## stat46       2.465e-04  2.394e-04   1.030 0.303078    
## stat47       3.648e-05  2.411e-04   0.151 0.879742    
## stat48       3.071e-04  2.384e-04   1.288 0.197804    
## stat49       2.038e-04  2.377e-04   0.857 0.391442    
## stat50       2.018e-04  2.382e-04   0.847 0.396940    
## stat51       2.801e-04  2.381e-04   1.176 0.239570    
## stat52      -1.407e-04  2.395e-04  -0.587 0.557058    
## stat53      -1.050e-04  2.412e-04  -0.435 0.663516    
## stat54      -5.186e-04  2.407e-04  -2.155 0.031225 *  
## stat55       3.869e-04  2.366e-04   1.635 0.102017    
## stat56      -2.670e-04  2.382e-04  -1.121 0.262241    
## stat57       1.740e-04  2.380e-04   0.731 0.464909    
## stat58      -1.002e-04  2.372e-04  -0.423 0.672600    
## stat59       2.894e-04  2.387e-04   1.213 0.225290    
## stat60       6.384e-04  2.402e-04   2.657 0.007900 ** 
## stat61       8.127e-06  2.387e-04   0.034 0.972838    
## stat62      -2.989e-04  2.395e-04  -1.248 0.212096    
## stat63       1.208e-04  2.389e-04   0.506 0.613000    
## stat64      -1.151e-04  2.384e-04  -0.483 0.629287    
## stat65      -2.970e-04  2.402e-04  -1.237 0.216252    
## stat66       9.670e-05  2.410e-04   0.401 0.688301    
## stat67       3.277e-05  2.400e-04   0.137 0.891418    
## stat68      -9.402e-05  2.399e-04  -0.392 0.695181    
## stat69      -3.027e-05  2.391e-04  -0.127 0.899267    
## stat70       1.742e-04  2.379e-04   0.732 0.463981    
## stat71       1.255e-04  2.374e-04   0.529 0.596903    
## stat72       3.614e-04  2.419e-04   1.494 0.135221    
## stat73       3.937e-05  2.397e-04   0.164 0.869544    
## stat74      -5.566e-06  2.402e-04  -0.023 0.981511    
## stat75      -6.287e-05  2.408e-04  -0.261 0.794025    
## stat76      -5.627e-06  2.401e-04  -0.023 0.981304    
## stat77      -9.281e-05  2.401e-04  -0.386 0.699167    
## stat78      -1.718e-04  2.402e-04  -0.715 0.474502    
## stat79      -1.057e-04  2.388e-04  -0.442 0.658158    
## stat80       2.677e-04  2.396e-04   1.117 0.263915    
## stat81       3.148e-04  2.389e-04   1.317 0.187726    
## stat82       7.126e-05  2.393e-04   0.298 0.765899    
## stat83       4.664e-05  2.388e-04   0.195 0.845138    
## stat84      -3.076e-05  2.385e-04  -0.129 0.897386    
## stat85      -5.686e-05  2.395e-04  -0.237 0.812350    
## stat86       2.014e-04  2.397e-04   0.840 0.400779    
## stat87      -2.418e-04  2.402e-04  -1.007 0.314148    
## stat88      -1.391e-04  2.368e-04  -0.587 0.557010    
## stat89      -3.123e-04  2.389e-04  -1.308 0.191093    
## stat90      -3.623e-04  2.392e-04  -1.514 0.129965    
## stat91      -3.167e-04  2.368e-04  -1.337 0.181168    
## stat92      -2.580e-04  2.387e-04  -1.081 0.279858    
## stat93      -2.738e-05  2.418e-04  -0.113 0.909844    
## stat94      -1.426e-04  2.392e-04  -0.596 0.551079    
## stat95      -1.040e-05  2.384e-04  -0.044 0.965200    
## stat96      -1.130e-04  2.390e-04  -0.473 0.636324    
## stat97       4.083e-05  2.372e-04   0.172 0.863318    
## stat98       3.480e-03  2.360e-04  14.746  < 2e-16 ***
## stat99       4.343e-04  2.411e-04   1.801 0.071786 .  
## stat100      4.028e-04  2.397e-04   1.680 0.092949 .  
## stat101     -1.607e-04  2.403e-04  -0.669 0.503611    
## stat102     -2.073e-06  2.389e-04  -0.009 0.993075    
## stat103     -1.961e-04  2.431e-04  -0.807 0.419927    
## stat104     -4.996e-04  2.372e-04  -2.107 0.035179 *  
## stat105      3.968e-04  2.371e-04   1.674 0.094244 .  
## stat106     -2.467e-04  2.378e-04  -1.038 0.299498    
## stat107     -1.683e-04  2.387e-04  -0.705 0.480981    
## stat108     -1.629e-04  2.394e-04  -0.680 0.496271    
## stat109      1.383e-04  2.392e-04   0.578 0.563092    
## stat110     -3.296e-03  2.372e-04 -13.897  < 2e-16 ***
## stat111     -1.197e-04  2.389e-04  -0.501 0.616463    
## stat112     -3.948e-05  2.405e-04  -0.164 0.869622    
## stat113     -2.554e-04  2.411e-04  -1.059 0.289476    
## stat114      3.010e-04  2.392e-04   1.258 0.208333    
## stat115      4.888e-05  2.375e-04   0.206 0.836974    
## stat116      3.341e-04  2.402e-04   1.391 0.164309    
## stat117      2.022e-04  2.397e-04   0.844 0.398908    
## stat118     -9.247e-05  2.383e-04  -0.388 0.697998    
## stat119      1.976e-05  2.402e-04   0.082 0.934456    
## stat120      8.615e-05  2.366e-04   0.364 0.715777    
## stat121     -6.578e-05  2.395e-04  -0.275 0.783563    
## stat122      9.975e-05  2.388e-04   0.418 0.676112    
## stat123      6.077e-05  2.427e-04   0.250 0.802310    
## stat124     -1.801e-04  2.387e-04  -0.755 0.450569    
## stat125      4.136e-04  2.409e-04   1.717 0.086033 .  
## stat126      4.641e-04  2.383e-04   1.948 0.051523 .  
## stat127     -7.579e-05  2.392e-04  -0.317 0.751354    
## stat128     -1.959e-04  2.392e-04  -0.819 0.412671    
## stat129     -1.552e-04  2.378e-04  -0.653 0.514053    
## stat130      1.256e-04  2.397e-04   0.524 0.600360    
## stat131      1.257e-04  2.398e-04   0.524 0.600099    
## stat132     -1.612e-04  2.385e-04  -0.676 0.499162    
## stat133      3.007e-05  2.386e-04   0.126 0.899704    
## stat134     -2.965e-05  2.374e-04  -0.125 0.900591    
## stat135     -5.505e-06  2.402e-04  -0.023 0.981720    
## stat136      1.184e-04  2.401e-04   0.493 0.621768    
## stat137      2.411e-04  2.379e-04   1.013 0.311015    
## stat138      1.097e-04  2.390e-04   0.459 0.646207    
## stat139      2.800e-04  2.401e-04   1.166 0.243552    
## stat140     -2.943e-05  2.380e-04  -0.124 0.901587    
## stat141      2.259e-04  2.364e-04   0.956 0.339351    
## stat142     -8.146e-05  2.417e-04  -0.337 0.736150    
## stat143      2.374e-04  2.401e-04   0.989 0.322837    
## stat144      6.343e-04  2.382e-04   2.663 0.007778 ** 
## stat145      1.908e-04  2.417e-04   0.789 0.429927    
## stat146     -4.891e-04  2.409e-04  -2.030 0.042401 *  
## stat147     -4.230e-04  2.405e-04  -1.758 0.078745 .  
## stat148     -2.812e-04  2.356e-04  -1.193 0.232806    
## stat149     -6.638e-04  2.407e-04  -2.758 0.005827 ** 
## stat150     -1.105e-05  2.393e-04  -0.046 0.963184    
## stat151     -2.253e-04  2.413e-04  -0.934 0.350510    
## stat152     -1.677e-04  2.375e-04  -0.706 0.480281    
## stat153      1.400e-04  2.419e-04   0.579 0.562619    
## stat154      7.415e-05  2.416e-04   0.307 0.758932    
## stat155      1.428e-04  2.390e-04   0.597 0.550345    
## stat156      4.927e-04  2.401e-04   2.052 0.040173 *  
## stat157     -7.008e-06  2.382e-04  -0.029 0.976530    
## stat158      1.138e-04  2.424e-04   0.469 0.638788    
## stat159     -1.946e-04  2.383e-04  -0.817 0.414089    
## stat160      1.364e-04  2.401e-04   0.568 0.569945    
## stat161      1.971e-04  2.406e-04   0.819 0.412732    
## stat162      5.058e-05  2.360e-04   0.214 0.830329    
## stat163      5.641e-07  2.411e-04   0.002 0.998134    
## stat164      1.687e-04  2.400e-04   0.703 0.482032    
## stat165     -9.582e-05  2.378e-04  -0.403 0.687052    
## stat166     -2.864e-04  2.371e-04  -1.208 0.227059    
## stat167     -2.036e-04  2.388e-04  -0.852 0.394040    
## stat168     -9.854e-05  2.389e-04  -0.412 0.679991    
## stat169      1.478e-04  2.393e-04   0.617 0.537021    
## stat170     -2.387e-04  2.383e-04  -1.002 0.316558    
## stat171      3.222e-04  2.398e-04   1.344 0.179110    
## stat172      3.024e-04  2.381e-04   1.270 0.204035    
## stat173     -3.604e-04  2.405e-04  -1.499 0.134003    
## stat174     -2.338e-04  2.391e-04  -0.978 0.328155    
## stat175     -2.978e-04  2.401e-04  -1.240 0.214960    
## stat176     -3.232e-05  2.391e-04  -0.135 0.892508    
## stat177     -1.045e-04  2.399e-04  -0.436 0.663116    
## stat178      1.604e-04  2.414e-04   0.664 0.506402    
## stat179      1.380e-04  2.385e-04   0.579 0.562875    
## stat180      6.324e-06  2.374e-04   0.027 0.978747    
## stat181      1.074e-04  2.410e-04   0.446 0.655848    
## stat182      1.322e-04  2.396e-04   0.552 0.581064    
## stat183      2.643e-04  2.380e-04   1.110 0.266876    
## stat184      7.350e-05  2.406e-04   0.305 0.760048    
## stat185     -2.387e-05  2.378e-04  -0.100 0.920053    
## stat186     -1.529e-04  2.404e-04  -0.636 0.524697    
## stat187     -6.713e-04  2.388e-04  -2.811 0.004950 ** 
## stat188      2.059e-04  2.380e-04   0.865 0.387006    
## stat189     -2.701e-05  2.399e-04  -0.113 0.910369    
## stat190      1.627e-04  2.378e-04   0.684 0.493914    
## stat191     -3.461e-04  2.391e-04  -1.447 0.147879    
## stat192     -2.573e-05  2.422e-04  -0.106 0.915410    
## stat193     -1.104e-04  2.420e-04  -0.456 0.648396    
## stat194     -7.400e-05  2.389e-04  -0.310 0.756712    
## stat195      1.855e-04  2.396e-04   0.774 0.438849    
## stat196      4.512e-05  2.426e-04   0.186 0.852446    
## stat197      3.224e-04  2.374e-04   1.358 0.174531    
## stat198     -5.409e-04  2.392e-04  -2.261 0.023803 *  
## stat199      3.267e-04  2.371e-04   1.378 0.168395    
## stat200     -2.239e-04  2.364e-04  -0.947 0.343750    
## stat201     -4.231e-05  2.385e-04  -0.177 0.859202    
## stat202     -2.507e-04  2.417e-04  -1.037 0.299732    
## stat203      3.672e-05  2.380e-04   0.154 0.877421    
## stat204     -5.177e-04  2.374e-04  -2.181 0.029255 *  
## stat205     -2.181e-04  2.379e-04  -0.917 0.359306    
## stat206      6.729e-05  2.404e-04   0.280 0.779602    
## stat207      3.420e-04  2.383e-04   1.436 0.151175    
## stat208      2.302e-04  2.404e-04   0.958 0.338349    
## stat209     -3.134e-04  2.392e-04  -1.310 0.190098    
## stat210      3.966e-06  2.392e-04   0.017 0.986774    
## stat211     -1.592e-04  2.386e-04  -0.667 0.504808    
## stat212     -9.267e-05  2.405e-04  -0.385 0.700019    
## stat213     -1.623e-04  2.408e-04  -0.674 0.500358    
## stat214     -5.173e-04  2.402e-04  -2.154 0.031275 *  
## stat215     -2.403e-04  2.395e-04  -1.003 0.315689    
## stat216     -2.614e-04  2.394e-04  -1.092 0.274931    
## stat217      3.122e-04  2.399e-04   1.302 0.193114    
## sqrt.x18     2.675e-02  9.130e-04  29.297  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.03145 on 5761 degrees of freedom
## Multiple R-squared:  0.2776, Adjusted R-squared:  0.2475 
## F-statistic: 9.223 on 240 and 5761 DF,  p-value: < 2.2e-16
# Diagnostic plots for the full model; the return value is indexed by row
# name and thresholded against 4/n below, so it is presumably the per-row
# Cook's distance vector — confirm in plot.diagnostics' definition.
cd.full = plot.diagnostics(model.full, data.train)

## [1] "Number of data points that have Cook's D > 4/n: 294"
## [1] "Number of data points that have Cook's D > 1: 0"

Checking with removal of high influence points

# Identify high-influence rows via the 4/n Cook's distance rule of thumb,
# drop them, and refit the full model on the reduced training set.
cd.cutoff = 4 / nrow(data.train)
high.cd = names(cd.full[cd.full > cd.cutoff])
keep.rows = !(rownames(data.train) %in% high.cd)
data.train2 = data.train[keep.rows, ]
model.full2 = lm(formula, data.train2)
summary(model.full2)
## 
## Call:
## lm(formula = formula, data = data.train2)
## 
## Residuals:
##       Min        1Q    Median        3Q       Max 
## -0.062903 -0.017492 -0.002541  0.016588  0.071587 
## 
## Coefficients:
##               Estimate Std. Error t value Pr(>|t|)    
## (Intercept)  1.956e+00  7.579e-03 258.025  < 2e-16 ***
## x1          -4.544e-04  5.204e-04  -0.873 0.382604    
## x2           1.015e-04  3.322e-04   0.306 0.759916    
## x3           4.849e-05  9.057e-05   0.535 0.592416    
## x4          -5.051e-05  7.173e-06  -7.042 2.13e-12 ***
## x5           3.730e-04  2.346e-04   1.590 0.111906    
## x6          -3.688e-04  4.763e-04  -0.774 0.438758    
## x7           1.245e-02  5.044e-04  24.684  < 2e-16 ***
## x8           5.583e-04  1.183e-04   4.719 2.43e-06 ***
## x9           3.210e-03  2.622e-04  12.243  < 2e-16 ***
## x10          1.519e-03  2.454e-04   6.189 6.50e-10 ***
## x11          2.492e+05  5.893e+04   4.230 2.38e-05 ***
## x12         -1.158e-04  1.495e-04  -0.775 0.438487    
## x13          1.156e-04  5.979e-05   1.934 0.053179 .  
## x14         -4.011e-04  2.565e-04  -1.563 0.118033    
## x15          2.916e-04  2.444e-04   1.193 0.232873    
## x16          7.755e-04  1.700e-04   4.563 5.16e-06 ***
## x17          1.490e-03  2.570e-04   5.799 7.04e-09 ***
## x19          2.193e-04  1.310e-04   1.673 0.094330 .  
## x20         -7.691e-04  9.129e-04  -0.842 0.399556    
## x21          1.318e-04  3.344e-05   3.941 8.23e-05 ***
## x22         -7.568e-04  2.743e-04  -2.759 0.005810 ** 
## x23          1.472e-04  2.604e-04   0.565 0.571800    
## stat1       -7.547e-05  1.970e-04  -0.383 0.701657    
## stat2       -1.134e-04  1.955e-04  -0.580 0.561818    
## stat3        5.369e-04  1.978e-04   2.714 0.006678 ** 
## stat4       -5.086e-04  1.988e-04  -2.558 0.010541 *  
## stat5       -2.534e-04  1.978e-04  -1.281 0.200102    
## stat6       -3.530e-04  1.963e-04  -1.798 0.072176 .  
## stat7       -1.038e-04  1.969e-04  -0.527 0.598257    
## stat8       -9.570e-05  1.970e-04  -0.486 0.627222    
## stat9        7.079e-05  1.971e-04   0.359 0.719441    
## stat10      -2.187e-04  1.964e-04  -1.114 0.265512    
## stat11      -1.377e-04  1.984e-04  -0.694 0.487457    
## stat12       1.589e-04  1.959e-04   0.811 0.417249    
## stat13      -4.785e-04  1.959e-04  -2.442 0.014629 *  
## stat14      -8.648e-04  1.954e-04  -4.425 9.83e-06 ***
## stat15      -4.150e-04  1.957e-04  -2.121 0.033978 *  
## stat16      -1.770e-04  1.956e-04  -0.905 0.365627    
## stat17      -1.384e-04  1.947e-04  -0.711 0.477015    
## stat18      -2.375e-04  1.947e-04  -1.220 0.222438    
## stat19       3.248e-04  1.978e-04   1.642 0.100597    
## stat20      -1.129e-04  1.956e-04  -0.577 0.563779    
## stat21      -5.488e-05  1.973e-04  -0.278 0.780914    
## stat22      -3.026e-04  1.975e-04  -1.532 0.125476    
## stat23       7.229e-04  1.962e-04   3.685 0.000231 ***
## stat24      -3.859e-04  1.966e-04  -1.963 0.049694 *  
## stat25      -1.443e-04  1.957e-04  -0.737 0.461129    
## stat26      -2.538e-04  1.973e-04  -1.286 0.198489    
## stat27       2.993e-04  1.974e-04   1.517 0.129430    
## stat28      -9.536e-06  1.971e-04  -0.048 0.961424    
## stat29      -8.771e-05  1.988e-04  -0.441 0.659084    
## stat30       1.057e-04  1.979e-04   0.534 0.593221    
## stat31       9.630e-05  1.981e-04   0.486 0.626952    
## stat32       5.959e-05  1.977e-04   0.301 0.763091    
## stat33      -3.489e-04  1.966e-04  -1.774 0.076116 .  
## stat34       2.825e-04  1.964e-04   1.438 0.150480    
## stat35      -6.006e-04  1.970e-04  -3.048 0.002312 ** 
## stat36      -4.622e-05  1.957e-04  -0.236 0.813280    
## stat37      -9.428e-05  1.993e-04  -0.473 0.636113    
## stat38       4.888e-04  1.971e-04   2.480 0.013166 *  
## stat39      -4.609e-04  1.955e-04  -2.358 0.018410 *  
## stat40      -4.327e-05  1.970e-04  -0.220 0.826174    
## stat41      -2.629e-04  1.959e-04  -1.342 0.179577    
## stat42      -3.969e-04  1.966e-04  -2.019 0.043549 *  
## stat43      -3.109e-04  1.987e-04  -1.564 0.117780    
## stat44       1.290e-04  1.975e-04   0.653 0.513700    
## stat45      -2.767e-04  1.963e-04  -1.409 0.158808    
## stat46       2.261e-04  1.970e-04   1.147 0.251231    
## stat47       1.141e-04  1.985e-04   0.575 0.565479    
## stat48       1.641e-04  1.962e-04   0.837 0.402758    
## stat49       5.132e-05  1.957e-04   0.262 0.793112    
## stat50       2.903e-04  1.960e-04   1.481 0.138686    
## stat51       3.170e-04  1.961e-04   1.617 0.105989    
## stat52      -3.319e-05  1.975e-04  -0.168 0.866589    
## stat53      -3.031e-05  1.986e-04  -0.153 0.878696    
## stat54      -5.294e-04  1.984e-04  -2.669 0.007632 ** 
## stat55       2.381e-04  1.947e-04   1.223 0.221440    
## stat56      -3.605e-05  1.958e-04  -0.184 0.853926    
## stat57       1.108e-05  1.961e-04   0.056 0.954961    
## stat58      -2.420e-04  1.948e-04  -1.242 0.214127    
## stat59       2.476e-04  1.959e-04   1.264 0.206390    
## stat60       6.687e-04  1.979e-04   3.379 0.000731 ***
## stat61      -8.243e-05  1.962e-04  -0.420 0.674345    
## stat62      -4.589e-04  1.969e-04  -2.330 0.019827 *  
## stat63       1.908e-04  1.968e-04   0.969 0.332555    
## stat64      -2.814e-06  1.962e-04  -0.014 0.988555    
## stat65      -1.279e-04  1.973e-04  -0.648 0.516904    
## stat66       1.005e-04  1.984e-04   0.506 0.612546    
## stat67       1.622e-04  1.972e-04   0.823 0.410792    
## stat68      -1.712e-04  1.972e-04  -0.868 0.385498    
## stat69      -8.333e-05  1.966e-04  -0.424 0.671700    
## stat70       1.931e-04  1.959e-04   0.985 0.324451    
## stat71       1.889e-04  1.959e-04   0.964 0.335139    
## stat72       2.400e-04  1.990e-04   1.206 0.227813    
## stat73      -1.953e-05  1.975e-04  -0.099 0.921221    
## stat74       4.592e-05  1.975e-04   0.233 0.816109    
## stat75       2.023e-04  1.979e-04   1.022 0.306603    
## stat76       1.106e-05  1.971e-04   0.056 0.955282    
## stat77       1.716e-04  1.978e-04   0.868 0.385525    
## stat78      -4.004e-04  1.972e-04  -2.030 0.042355 *  
## stat79       1.110e-04  1.963e-04   0.565 0.571957    
## stat80       3.113e-04  1.971e-04   1.579 0.114293    
## stat81       1.799e-04  1.969e-04   0.914 0.360866    
## stat82       1.298e-04  1.969e-04   0.659 0.509662    
## stat83       8.100e-05  1.963e-04   0.413 0.679924    
## stat84      -9.116e-05  1.958e-04  -0.466 0.641566    
## stat85      -2.599e-04  1.970e-04  -1.319 0.187091    
## stat86       4.105e-04  1.972e-04   2.081 0.037439 *  
## stat87      -1.930e-04  1.975e-04  -0.977 0.328452    
## stat88      -2.091e-05  1.953e-04  -0.107 0.914712    
## stat89      -1.144e-04  1.969e-04  -0.581 0.561381    
## stat90      -3.778e-04  1.969e-04  -1.919 0.055011 .  
## stat91      -4.045e-04  1.945e-04  -2.080 0.037587 *  
## stat92      -2.420e-04  1.965e-04  -1.231 0.218192    
## stat93       1.385e-04  1.993e-04   0.695 0.487245    
## stat94       1.027e-04  1.964e-04   0.523 0.600921    
## stat95       2.770e-04  1.961e-04   1.413 0.157831    
## stat96      -1.981e-04  1.967e-04  -1.007 0.313905    
## stat97       2.285e-04  1.951e-04   1.171 0.241539    
## stat98       3.324e-03  1.943e-04  17.105  < 2e-16 ***
## stat99       3.533e-04  1.983e-04   1.782 0.074789 .  
## stat100      4.570e-04  1.972e-04   2.317 0.020521 *  
## stat101     -9.718e-05  1.977e-04  -0.492 0.623002    
## stat102      4.805e-05  1.963e-04   0.245 0.806660    
## stat103     -1.484e-04  1.999e-04  -0.742 0.457819    
## stat104     -3.565e-04  1.954e-04  -1.825 0.068108 .  
## stat105      3.237e-04  1.952e-04   1.658 0.097366 .  
## stat106     -2.432e-04  1.955e-04  -1.244 0.213512    
## stat107     -1.904e-04  1.963e-04  -0.970 0.332216    
## stat108     -1.317e-04  1.971e-04  -0.669 0.503821    
## stat109      6.734e-05  1.970e-04   0.342 0.732498    
## stat110     -3.308e-03  1.949e-04 -16.977  < 2e-16 ***
## stat111      2.198e-05  1.966e-04   0.112 0.910989    
## stat112      4.976e-05  1.982e-04   0.251 0.801751    
## stat113     -1.682e-04  1.983e-04  -0.848 0.396323    
## stat114      5.322e-04  1.969e-04   2.703 0.006894 ** 
## stat115      2.002e-04  1.957e-04   1.023 0.306454    
## stat116      2.329e-04  1.973e-04   1.180 0.237895    
## stat117      1.828e-04  1.967e-04   0.929 0.352749    
## stat118      6.526e-05  1.955e-04   0.334 0.738515    
## stat119      3.011e-04  1.975e-04   1.525 0.127307    
## stat120     -1.319e-04  1.946e-04  -0.678 0.497827    
## stat121     -1.145e-04  1.972e-04  -0.581 0.561493    
## stat122     -9.039e-07  1.966e-04  -0.005 0.996332    
## stat123      2.336e-04  1.991e-04   1.173 0.240778    
## stat124     -2.514e-04  1.963e-04  -1.281 0.200337    
## stat125      3.119e-04  1.980e-04   1.576 0.115189    
## stat126      4.047e-04  1.961e-04   2.064 0.039054 *  
## stat127     -3.627e-05  1.966e-04  -0.185 0.853628    
## stat128     -5.350e-04  1.963e-04  -2.725 0.006453 ** 
## stat129     -1.973e-04  1.954e-04  -1.010 0.312695    
## stat130      1.279e-04  1.973e-04   0.648 0.516894    
## stat131      1.782e-05  1.971e-04   0.090 0.927945    
## stat132     -1.443e-04  1.958e-04  -0.737 0.461014    
## stat133      1.338e-04  1.970e-04   0.679 0.496945    
## stat134      9.696e-05  1.953e-04   0.497 0.619546    
## stat135     -7.397e-05  1.980e-04  -0.374 0.708696    
## stat136     -8.079e-05  1.972e-04  -0.410 0.682056    
## stat137      3.162e-04  1.956e-04   1.616 0.106104    
## stat138      4.298e-05  1.966e-04   0.219 0.826913    
## stat139      2.152e-04  1.976e-04   1.089 0.276196    
## stat140      1.620e-04  1.954e-04   0.829 0.407178    
## stat141      4.780e-04  1.945e-04   2.457 0.014038 *  
## stat142     -1.149e-05  1.988e-04  -0.058 0.953902    
## stat143      2.501e-04  1.973e-04   1.267 0.205042    
## stat144      5.343e-04  1.956e-04   2.731 0.006340 ** 
## stat145      3.496e-05  1.992e-04   0.176 0.860684    
## stat146     -5.035e-04  1.986e-04  -2.535 0.011282 *  
## stat147     -4.711e-04  1.981e-04  -2.379 0.017405 *  
## stat148     -3.531e-04  1.944e-04  -1.817 0.069326 .  
## stat149     -5.253e-04  1.987e-04  -2.644 0.008213 ** 
## stat150     -8.915e-05  1.976e-04  -0.451 0.651806    
## stat151      6.291e-06  1.992e-04   0.032 0.974806    
## stat152      9.890e-07  1.952e-04   0.005 0.995958    
## stat153      1.387e-04  1.987e-04   0.698 0.485192    
## stat154      1.716e-04  1.990e-04   0.863 0.388413    
## stat155      2.613e-04  1.970e-04   1.326 0.184800    
## stat156      4.299e-04  1.972e-04   2.180 0.029278 *  
## stat157     -8.384e-05  1.956e-04  -0.429 0.668245    
## stat158      3.115e-04  1.994e-04   1.562 0.118340    
## stat159     -2.136e-04  1.962e-04  -1.089 0.276402    
## stat160      2.673e-04  1.979e-04   1.351 0.176857    
## stat161      8.528e-05  1.979e-04   0.431 0.666521    
## stat162     -5.591e-05  1.941e-04  -0.288 0.773337    
## stat163      7.678e-05  1.990e-04   0.386 0.699673    
## stat164     -6.012e-06  1.980e-04  -0.030 0.975784    
## stat165      3.829e-05  1.960e-04   0.195 0.845130    
## stat166     -1.489e-04  1.946e-04  -0.765 0.444305    
## stat167     -2.455e-04  1.967e-04  -1.248 0.212052    
## stat168      2.205e-06  1.960e-04   0.011 0.991023    
## stat169      1.986e-04  1.978e-04   1.004 0.315463    
## stat170     -2.179e-04  1.961e-04  -1.111 0.266428    
## stat171      1.952e-05  1.977e-04   0.099 0.921327    
## stat172      4.937e-04  1.955e-04   2.525 0.011604 *  
## stat173     -1.631e-04  1.979e-04  -0.824 0.410010    
## stat174     -2.071e-05  1.968e-04  -0.105 0.916190    
## stat175     -2.949e-04  1.974e-04  -1.494 0.135204    
## stat176     -2.171e-04  1.965e-04  -1.105 0.269104    
## stat177     -3.331e-04  1.971e-04  -1.690 0.091123 .  
## stat178      2.132e-04  1.987e-04   1.073 0.283289    
## stat179      1.528e-04  1.962e-04   0.779 0.436144    
## stat180      2.659e-04  1.960e-04   1.356 0.175000    
## stat181      1.914e-04  1.980e-04   0.967 0.333565    
## stat182      2.934e-04  1.973e-04   1.487 0.137059    
## stat183      2.828e-04  1.964e-04   1.440 0.149894    
## stat184      2.131e-04  1.977e-04   1.078 0.281243    
## stat185      1.447e-05  1.961e-04   0.074 0.941174    
## stat186      2.248e-04  1.976e-04   1.138 0.255272    
## stat187     -4.821e-04  1.962e-04  -2.457 0.014053 *  
## stat188      3.014e-04  1.958e-04   1.540 0.123682    
## stat189     -2.138e-04  1.977e-04  -1.081 0.279658    
## stat190     -8.102e-06  1.957e-04  -0.041 0.966977    
## stat191     -3.228e-04  1.962e-04  -1.645 0.100022    
## stat192     -1.023e-05  1.995e-04  -0.051 0.959117    
## stat193      1.409e-04  1.992e-04   0.707 0.479336    
## stat194     -7.896e-05  1.968e-04  -0.401 0.688300    
## stat195     -8.602e-06  1.976e-04  -0.044 0.965276    
## stat196      6.474e-06  1.995e-04   0.032 0.974109    
## stat197     -1.091e-05  1.958e-04  -0.056 0.955564    
## stat198     -3.880e-04  1.965e-04  -1.974 0.048427 *  
## stat199      3.674e-04  1.952e-04   1.882 0.059918 .  
## stat200     -1.573e-04  1.951e-04  -0.806 0.420038    
## stat201      3.990e-05  1.962e-04   0.203 0.838861    
## stat202     -3.200e-05  1.992e-04  -0.161 0.872376    
## stat203      1.888e-04  1.954e-04   0.966 0.333856    
## stat204     -3.321e-04  1.954e-04  -1.700 0.089210 .  
## stat205      3.263e-05  1.951e-04   0.167 0.867225    
## stat206     -1.969e-05  1.977e-04  -0.100 0.920686    
## stat207      1.945e-04  1.965e-04   0.990 0.322272    
## stat208      2.993e-04  1.982e-04   1.510 0.130982    
## stat209     -2.318e-04  1.966e-04  -1.179 0.238438    
## stat210     -2.467e-04  1.969e-04  -1.253 0.210286    
## stat211     -7.122e-05  1.963e-04  -0.363 0.716755    
## stat212      5.328e-07  1.980e-04   0.003 0.997853    
## stat213     -1.250e-04  1.980e-04  -0.631 0.527982    
## stat214     -3.081e-04  1.979e-04  -1.556 0.119657    
## stat215     -2.796e-04  1.970e-04  -1.420 0.155787    
## stat216     -2.625e-04  1.965e-04  -1.336 0.181617    
## stat217      2.287e-04  1.971e-04   1.160 0.245993    
## sqrt.x18     2.671e-02  7.486e-04  35.684  < 2e-16 ***
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## Residual standard error: 0.02521 on 5467 degrees of freedom
## Multiple R-squared:  0.3835, Adjusted R-squared:  0.3565 
## F-statistic: 14.17 on 240 and 5467 DF,  p-value: < 2.2e-16
# Re-run diagnostics on the refit; returns the Cook's distance vector for
# the filtered training set (see cd.full above for the same usage).
cd.full2 = plot.diagnostics(model.full2, data.train2)

## [1] "Number of data points that have Cook's D > 4/n: 290"
## [1] "Number of data points that have Cook's D > 1: 0"
# much more normal residuals than before. 
# Checking to see if distributions are different and if so which variables
# High Leverage Plot 
# Tag each training row as a high-influence ('High') or ordinary ('Normal')
# observation and keep only that tag plus the target column (renamed so the
# plot below can refer to it generically).
is.high = rownames(data.train) %in% high.cd
plotData = data.train %>%
  mutate(type = ifelse(is.high, 'High', 'Normal')) %>%
  dplyr::select(type, target = one_of(label.names))

# Compare the target's distribution between high-influence and ordinary rows.
# Outliers are hidden (outlier.shape = NA) so the boxes, not extremes, carry
# the comparison. Fix: removed the stray double space in the plot title.
ggplot(data = plotData, aes(x = type, y = target)) +
  geom_boxplot(fill = 'light blue', outlier.shape = NA) +
  scale_y_continuous(name = "Target Variable Values") +
  theme_light() +
  ggtitle('Distribution of High Leverage Points and Normal Points')

# Rebuild plotData: keep the High/Normal tag plus every feature column
# (the target is excluded this time) so each feature can be compared
# across the two groups.
plotData = data.train %>% 
  rownames_to_column() %>%
  mutate(type=ifelse(rowname %in% high.cd,'High','Normal')) %>%
  dplyr::select(type,one_of(feature.names))
# 2 sample t-tests
# One pooled-variance two-sample t-test per feature (High vs Normal);
# the lambda closes over plotData$type, so the select and the grouping
# variable must come from the same plotData object.
comp.test = lapply(dplyr::select(plotData, one_of(feature.names)), function(x) t.test(x ~ plotData$type, var.equal = TRUE)) 

# Keep only the features whose group means differ significantly (p < 0.05)
# between high-influence and ordinary rows, then print their p-values.
sig.comp = list.filter(comp.test, p.value < 0.05)
# vapply instead of sapply: guarantees a named numeric vector even when
# sig.comp is empty (sapply would silently return an empty list instead).
vapply(sig.comp, function(x) x[['p.value']], numeric(1))
##           x4        stat4       stat38       stat74       stat98 
## 2.951218e-03 2.255000e-02 2.096412e-02 4.118559e-03 2.599100e-07 
##      stat110      stat128      stat145      stat156      stat214 
## 7.088790e-04 1.487188e-02 2.074713e-02 2.574454e-02 5.600066e-03 
##     sqrt.x18 
## 1.053180e-02
# Distribution (box) plots: one panel per feature comparing high-influence
# vs. ordinary rows; written out as a single very tall jpeg.
mm = melt(plotData, id = c('type'))

ggplot(mm, aes(x = type, y = value)) +
  geom_boxplot() +
  facet_wrap(~variable, ncol = 10, scales = 'free') +
  ggtitle('Distribution of High Leverage Points and Normal Points')

ggsave('comparison.jpeg', width = 50, height = 400, units = 'cm', limitsize = FALSE)

Grand Means Model

# Intercept-only (grand mean) baselines: lower bounds for the forward /
# stepwise searches, fit on the full and the filtered training sets.
model.null <- lm(grand.mean.formula, data = data.train)
model.null2 <- lm(grand.mean.formula, data = data.train2)

Variable Selection

Basic: http://www.stat.columbia.edu/~martin/W2024/R10.pdf Cross Validation + Other Metrics: http://www.sthda.com/english/articles/37-model-selection-essentials-in-r/154-stepwise-regression-essentials-in-r/

Forward Selection (w/ full train)

Train

# Forward stepwise selection on the full training set: start from the
# grand-mean model and add terms from model.full's scope by AIC.
if (isTRUE(algo.forward)) {
  t1 = Sys.time()

  model.forward = step(model.null,
                       scope = list(lower = model.null, upper = model.full),
                       direction = "forward", trace = 0)
  print(summary(model.forward))

  t2 = Sys.time()
  # Pin the units: a bare `t2 - t1` lets difftime auto-select secs/mins/hours,
  # making the printed number ambiguous across runs.
  print(paste0("Time taken for Forward Selection: ",
               format(difftime(t2, t1, units = "secs"))))

  plot.diagnostics(model.forward, data.train)
}

Test

# Evaluate the forward-selected model on the hold-out test set.
# isTRUE() is the idiomatic guard: unlike `== TRUE` it cannot yield NA
# and crash the if() when the flag is missing or malformed.
if (isTRUE(algo.forward)) {
  test.model(model.forward, data.test, "Forward Selection")
}

Forward Selection (w/ filtered train)

Train

# Forward stepwise selection on the leverage-filtered training set
# (data.train2), stored separately as model.forward2.
if (isTRUE(algo.forward)) {
  t1 = Sys.time()

  model.forward2 = step(model.null2,
                        scope = list(lower = model.null2, upper = model.full2),
                        direction = "forward", trace = 0)
  print(summary(model.forward2))

  t2 = Sys.time()
  # Pin the units: a bare `t2 - t1` lets difftime auto-select secs/mins/hours,
  # making the printed number ambiguous across runs.
  print(paste0("Time taken for Forward Selection: ",
               format(difftime(t2, t1, units = "secs"))))

  plot.diagnostics(model.forward2, data.train2)
}

Test

# Evaluate the filtered-train forward-selected model on the hold-out test set.
# isTRUE() is the idiomatic guard: unlike `== TRUE` it cannot yield NA
# and crash the if() when the flag is missing or malformed.
if (isTRUE(algo.forward)) {
  test.model(model.forward2, data.test, "Forward Selection (2)")
}

Forward Selection with CV (w/ full train)

Train

# Forward selection via caret's leapForward with cross-validation on the full
# training set; stores the fitted model and the chosen-subset identifier.
if (algo.forward.caret == TRUE) {
  set.seed(1)  # reproducible CV folds
  returned = train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapForward",
    feature.names = feature.names
  )
  model.forward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 7 on full training set
##     nvmax       RMSE  Rsquared        MAE       RMSESD RsquaredSD
## 1       1 0.03410610 0.1149630 0.02657577 0.0012861157 0.02115810
## 2       2 0.03328304 0.1574261 0.02582705 0.0011082962 0.02564652
## 3       3 0.03267992 0.1875344 0.02522129 0.0010042512 0.02555459
## 4       4 0.03218662 0.2115343 0.02451101 0.0010005851 0.02634272
## 5       5 0.03186632 0.2271994 0.02432100 0.0009679660 0.02729495
## 6       6 0.03185480 0.2277527 0.02432260 0.0009428484 0.02753486
## 7       7 0.03174204 0.2329557 0.02426652 0.0009365345 0.02626930
## 8       8 0.03177661 0.2313360 0.02430354 0.0009495410 0.02509086
## 9       9 0.03177342 0.2315882 0.02429654 0.0009401013 0.02649936
## 10     10 0.03174421 0.2330436 0.02428987 0.0009336330 0.02724717
## 11     11 0.03175240 0.2327719 0.02430959 0.0009426783 0.02703386
## 12     12 0.03174790 0.2330175 0.02429329 0.0009361445 0.02713821
## 13     13 0.03176566 0.2322273 0.02431153 0.0009301109 0.02725054
## 14     14 0.03176252 0.2324098 0.02431077 0.0009013124 0.02867587
## 15     15 0.03175213 0.2328987 0.02430099 0.0009248676 0.02826833
## 16     16 0.03177382 0.2318280 0.02431036 0.0009445884 0.02676660
## 17     17 0.03178911 0.2311487 0.02432604 0.0009658569 0.02635184
## 18     18 0.03177643 0.2317125 0.02431717 0.0009793686 0.02490692
## 19     19 0.03175964 0.2325262 0.02429739 0.0009456329 0.02627921
## 20     20 0.03175338 0.2328159 0.02429680 0.0009786020 0.02653806
## 21     21 0.03174686 0.2331350 0.02429265 0.0009894709 0.02546913
## 22     22 0.03176649 0.2322184 0.02430019 0.0010005594 0.02502278
## 23     23 0.03176655 0.2322583 0.02430402 0.0009852345 0.02545103
## 24     24 0.03178902 0.2312438 0.02432037 0.0009709799 0.02507217
## 25     25 0.03179492 0.2309855 0.02432014 0.0009811234 0.02505781
## 26     26 0.03178208 0.2316431 0.02431141 0.0009527559 0.02605748
## 27     27 0.03178311 0.2316538 0.02430944 0.0009280740 0.02642813
## 28     28 0.03180855 0.2305068 0.02432795 0.0009411352 0.02680483
## 29     29 0.03181247 0.2303779 0.02433534 0.0009577700 0.02708067
## 30     30 0.03181153 0.2305138 0.02433202 0.0009699271 0.02718517
## 31     31 0.03181772 0.2302175 0.02433785 0.0009741382 0.02736658
## 32     32 0.03184322 0.2290545 0.02435844 0.0009838733 0.02646426
## 33     33 0.03184302 0.2290742 0.02436060 0.0009968974 0.02635739
## 34     34 0.03184450 0.2290212 0.02436921 0.0009797616 0.02609372
## 35     35 0.03185779 0.2284280 0.02437936 0.0009851665 0.02444185
## 36     36 0.03187608 0.2276434 0.02439540 0.0009810612 0.02421871
## 37     37 0.03188824 0.2271375 0.02439858 0.0009854518 0.02338710
## 38     38 0.03189828 0.2266973 0.02440059 0.0009976156 0.02343964
## 39     39 0.03190292 0.2265045 0.02440543 0.0010080669 0.02323980
## 40     40 0.03189997 0.2267181 0.02440931 0.0010068500 0.02319182
## 41     41 0.03189848 0.2268175 0.02441118 0.0010091399 0.02303736
## 42     42 0.03191460 0.2261415 0.02443213 0.0010199448 0.02338456
## 43     43 0.03192977 0.2254941 0.02444085 0.0010118629 0.02346942
## 44     44 0.03193267 0.2254000 0.02444851 0.0010044884 0.02338520
## 45     45 0.03193616 0.2252877 0.02445102 0.0009973765 0.02400841
## 46     46 0.03193405 0.2254265 0.02445603 0.0009882143 0.02454566
## 47     47 0.03193728 0.2252663 0.02446391 0.0009950543 0.02428079
## 48     48 0.03194665 0.2249123 0.02446199 0.0009887522 0.02445599
## 49     49 0.03195804 0.2244312 0.02447236 0.0009910758 0.02471262
## 50     50 0.03195650 0.2245477 0.02447378 0.0009856750 0.02497266
## 51     51 0.03196159 0.2243297 0.02448205 0.0009940737 0.02482403
## 52     52 0.03196289 0.2242614 0.02447613 0.0010036587 0.02504229
## 53     53 0.03195757 0.2245033 0.02447106 0.0010117905 0.02475876
## 54     54 0.03195437 0.2246278 0.02446148 0.0010232917 0.02432849
## 55     55 0.03195577 0.2245963 0.02446257 0.0010207142 0.02420678
## 56     56 0.03196041 0.2244640 0.02446725 0.0010321496 0.02469363
## 57     57 0.03196378 0.2243619 0.02446573 0.0010160059 0.02507184
## 58     58 0.03196930 0.2241456 0.02447356 0.0010082554 0.02515475
## 59     59 0.03197609 0.2238863 0.02448818 0.0010116584 0.02565957
## 60     60 0.03197623 0.2239077 0.02448726 0.0009923077 0.02562587
## 61     61 0.03197356 0.2240548 0.02448375 0.0009935484 0.02567438
## 62     62 0.03196472 0.2244544 0.02447808 0.0009947980 0.02602750
## 63     63 0.03197566 0.2240089 0.02449004 0.0009868922 0.02599832
## 64     64 0.03199182 0.2233471 0.02449905 0.0009751642 0.02612664
## 65     65 0.03198888 0.2234645 0.02449935 0.0009855925 0.02649073
## 66     66 0.03199523 0.2232116 0.02451188 0.0009812827 0.02689908
## 67     67 0.03198776 0.2235500 0.02449831 0.0009814838 0.02679456
## 68     68 0.03199896 0.2230752 0.02451332 0.0009912635 0.02680591
## 69     69 0.03200049 0.2230180 0.02451345 0.0009977180 0.02682632
## 70     70 0.03201164 0.2225768 0.02451442 0.0009940761 0.02726234
## 71     71 0.03202129 0.2222111 0.02451776 0.0009843885 0.02775224
## 72     72 0.03202563 0.2220653 0.02452262 0.0009850751 0.02792345
## 73     73 0.03204676 0.2211429 0.02453492 0.0009933874 0.02783449
## 74     74 0.03204929 0.2210783 0.02453199 0.0009922574 0.02818577
## 75     75 0.03205565 0.2208305 0.02453178 0.0010011223 0.02788161
## 76     76 0.03205334 0.2209760 0.02453181 0.0010034120 0.02755142
## 77     77 0.03205503 0.2208989 0.02452906 0.0009933016 0.02757856
## 78     78 0.03205164 0.2210512 0.02453032 0.0009870924 0.02728188
## 79     79 0.03205331 0.2209749 0.02453541 0.0009953131 0.02738509
## 80     80 0.03207144 0.2201838 0.02454733 0.0009927354 0.02728707
## 81     81 0.03207381 0.2200840 0.02455528 0.0009827646 0.02757541
## 82     82 0.03207756 0.2199669 0.02454738 0.0009881664 0.02814396
## 83     83 0.03208927 0.2194606 0.02455404 0.0009868763 0.02803082
## 84     84 0.03208613 0.2195480 0.02454829 0.0009916769 0.02775853
## 85     85 0.03209195 0.2192816 0.02456166 0.0009922083 0.02785167
## 86     86 0.03208971 0.2193879 0.02455435 0.0009955592 0.02755396
## 87     87 0.03209958 0.2190192 0.02456120 0.0009908167 0.02800906
## 88     88 0.03210275 0.2189107 0.02456522 0.0009814057 0.02797314
## 89     89 0.03210411 0.2188411 0.02456811 0.0009794581 0.02782565
## 90     90 0.03210744 0.2187327 0.02456923 0.0009830991 0.02792252
## 91     91 0.03210329 0.2189450 0.02456455 0.0009804345 0.02767122
## 92     92 0.03210971 0.2186926 0.02456794 0.0009836154 0.02768268
## 93     93 0.03211038 0.2187165 0.02457156 0.0009831213 0.02820087
## 94     94 0.03211049 0.2187488 0.02457162 0.0009706660 0.02800638
## 95     95 0.03210658 0.2189634 0.02457076 0.0009687649 0.02829526
## 96     96 0.03210145 0.2191791 0.02457009 0.0009604467 0.02814503
## 97     97 0.03210086 0.2191903 0.02457377 0.0009629273 0.02782940
## 98     98 0.03210986 0.2188088 0.02457730 0.0009700764 0.02759556
## 99     99 0.03210583 0.2189933 0.02457608 0.0009747238 0.02728754
## 100   100 0.03211047 0.2188252 0.02457988 0.0009674759 0.02757551
## 101   101 0.03211634 0.2186142 0.02458643 0.0009652755 0.02783298
## 102   102 0.03211125 0.2188466 0.02457981 0.0009633215 0.02785471
## 103   103 0.03211827 0.2185675 0.02458321 0.0009681009 0.02790878
## 104   104 0.03211846 0.2185911 0.02458794 0.0009667362 0.02792534
## 105   105 0.03211896 0.2186209 0.02458961 0.0009756484 0.02798272
## 106   106 0.03212722 0.2182607 0.02459851 0.0009786752 0.02806739
## 107   107 0.03212924 0.2181784 0.02459697 0.0009806778 0.02806748
## 108   108 0.03212780 0.2182626 0.02459662 0.0009796664 0.02796011
## 109   109 0.03213865 0.2178166 0.02460437 0.0009862512 0.02758793
## 110   110 0.03214478 0.2175733 0.02460745 0.0009857648 0.02773176
## 111   111 0.03214810 0.2174479 0.02461015 0.0009807094 0.02784171
## 112   112 0.03214591 0.2175556 0.02460944 0.0009806207 0.02758687
## 113   113 0.03214832 0.2174673 0.02461167 0.0009840247 0.02769451
## 114   114 0.03214527 0.2176010 0.02461114 0.0009834985 0.02787513
## 115   115 0.03215373 0.2172432 0.02462180 0.0009776194 0.02796084
## 116   116 0.03215087 0.2173568 0.02461325 0.0009827046 0.02829031
## 117   117 0.03215802 0.2170614 0.02462091 0.0009819193 0.02775306
## 118   118 0.03216312 0.2168421 0.02462799 0.0009829035 0.02795487
## 119   119 0.03216474 0.2167672 0.02462885 0.0009873727 0.02809556
## 120   120 0.03216157 0.2168957 0.02462492 0.0009974608 0.02805021
## 121   121 0.03216470 0.2167609 0.02462647 0.0010010258 0.02804458
## 122   122 0.03216597 0.2167287 0.02462556 0.0010033535 0.02808583
## 123   123 0.03216481 0.2168006 0.02462378 0.0010009704 0.02810225
## 124   124 0.03216926 0.2166383 0.02462855 0.0009996470 0.02792532
## 125   125 0.03216932 0.2166353 0.02463203 0.0009945964 0.02797592
## 126   126 0.03217393 0.2164577 0.02463437 0.0009993933 0.02806645
## 127   127 0.03216744 0.2167382 0.02462955 0.0010042442 0.02786594
## 128   128 0.03217332 0.2165084 0.02463534 0.0010014789 0.02764626
## 129   129 0.03217859 0.2163088 0.02464352 0.0009975475 0.02786191
## 130   130 0.03218123 0.2162331 0.02464338 0.0009939854 0.02773141
## 131   131 0.03218162 0.2162365 0.02464373 0.0009989299 0.02793846
## 132   132 0.03218107 0.2162539 0.02464583 0.0010065093 0.02794963
## 133   133 0.03218022 0.2162953 0.02464673 0.0010052839 0.02767312
## 134   134 0.03218281 0.2162168 0.02464695 0.0010068244 0.02777557
## 135   135 0.03218303 0.2162312 0.02464624 0.0010035570 0.02776437
## 136   136 0.03218372 0.2162181 0.02464546 0.0010109204 0.02760551
## 137   137 0.03218635 0.2161092 0.02464751 0.0010155383 0.02753125
## 138   138 0.03217989 0.2163877 0.02464154 0.0010124750 0.02735140
## 139   139 0.03218060 0.2163518 0.02463871 0.0010175708 0.02722741
## 140   140 0.03217900 0.2164062 0.02463787 0.0010208253 0.02694497
## 141   141 0.03218386 0.2162214 0.02464259 0.0010205205 0.02697901
## 142   142 0.03219014 0.2159505 0.02465035 0.0010217486 0.02695589
## 143   143 0.03219280 0.2158616 0.02464893 0.0010192818 0.02656880
## 144   144 0.03219968 0.2155687 0.02465617 0.0010155572 0.02670750
## 145   145 0.03220238 0.2154839 0.02465871 0.0010172425 0.02702679
## 146   146 0.03220674 0.2153003 0.02465636 0.0010157900 0.02690586
## 147   147 0.03220568 0.2153226 0.02465336 0.0010175663 0.02676567
## 148   148 0.03220617 0.2153211 0.02464909 0.0010206365 0.02671193
## 149   149 0.03220728 0.2152768 0.02464984 0.0010275861 0.02685075
## 150   150 0.03220983 0.2151541 0.02465201 0.0010273610 0.02668501
## 151   151 0.03221399 0.2149944 0.02465438 0.0010329767 0.02681295
## 152   152 0.03221655 0.2148799 0.02465447 0.0010320598 0.02673990
## 153   153 0.03222109 0.2146883 0.02465798 0.0010364166 0.02692746
## 154   154 0.03222346 0.2145837 0.02465633 0.0010327055 0.02684972
## 155   155 0.03222458 0.2145428 0.02465876 0.0010283430 0.02698826
## 156   156 0.03223055 0.2142681 0.02466294 0.0010267949 0.02682007
## 157   157 0.03223244 0.2141778 0.02466335 0.0010283627 0.02681809
## 158   158 0.03223493 0.2141068 0.02466715 0.0010250022 0.02689644
## 159   159 0.03222989 0.2143191 0.02466452 0.0010339208 0.02697194
## 160   160 0.03223194 0.2142259 0.02466574 0.0010371214 0.02673095
## 161   161 0.03223099 0.2142843 0.02466311 0.0010366664 0.02700724
## 162   162 0.03222699 0.2144568 0.02465785 0.0010328488 0.02706012
## 163   163 0.03222562 0.2145108 0.02465966 0.0010363295 0.02693475
## 164   164 0.03222389 0.2145731 0.02465859 0.0010297172 0.02687916
## 165   165 0.03221732 0.2148746 0.02465428 0.0010289913 0.02702436
## 166   166 0.03221561 0.2149547 0.02465047 0.0010223334 0.02692227
## 167   167 0.03221863 0.2148427 0.02465484 0.0010242736 0.02695445
## 168   168 0.03221766 0.2148958 0.02465440 0.0010265455 0.02701275
## 169   169 0.03221879 0.2148565 0.02465630 0.0010298504 0.02717500
## 170   170 0.03222029 0.2147922 0.02465920 0.0010309931 0.02718452
## 171   171 0.03221949 0.2148474 0.02466131 0.0010319546 0.02712960
## 172   172 0.03222412 0.2146407 0.02466734 0.0010307912 0.02697888
## 173   173 0.03222168 0.2147597 0.02466679 0.0010283618 0.02697704
## 174   174 0.03222361 0.2146793 0.02466769 0.0010297597 0.02693682
## 175   175 0.03222688 0.2145505 0.02466627 0.0010305452 0.02698883
## 176   176 0.03223101 0.2143710 0.02467102 0.0010332428 0.02686259
## 177   177 0.03223419 0.2142410 0.02467619 0.0010330784 0.02684578
## 178   178 0.03223196 0.2143250 0.02467534 0.0010308731 0.02672281
## 179   179 0.03223089 0.2143774 0.02467329 0.0010305792 0.02676973
## 180   180 0.03223026 0.2143959 0.02467315 0.0010323724 0.02685293
## 181   181 0.03223327 0.2142669 0.02467612 0.0010346800 0.02693059
## 182   182 0.03223217 0.2143201 0.02467577 0.0010336938 0.02713226
## 183   183 0.03222900 0.2144554 0.02467361 0.0010341257 0.02708243
## 184   184 0.03223128 0.2143541 0.02467655 0.0010369733 0.02715954
## 185   185 0.03222617 0.2145783 0.02467325 0.0010385848 0.02728563
## 186   186 0.03222466 0.2146484 0.02467174 0.0010378095 0.02729797
## 187   187 0.03222344 0.2146895 0.02467223 0.0010372334 0.02733961
## 188   188 0.03222582 0.2145900 0.02467648 0.0010381219 0.02727544
## 189   189 0.03222725 0.2145114 0.02467832 0.0010422847 0.02720718
## 190   190 0.03222934 0.2144297 0.02468033 0.0010440887 0.02729145
## 191   191 0.03222885 0.2144459 0.02467938 0.0010426642 0.02733943
## 192   192 0.03222796 0.2144754 0.02467883 0.0010427452 0.02739401
## 193   193 0.03222977 0.2143947 0.02467996 0.0010433118 0.02740576
## 194   194 0.03222980 0.2144016 0.02467980 0.0010424844 0.02748528
## 195   195 0.03222863 0.2144611 0.02467906 0.0010437310 0.02755765
## 196   196 0.03222813 0.2144795 0.02467944 0.0010426368 0.02761224
## 197   197 0.03222894 0.2144380 0.02468128 0.0010438590 0.02752779
## 198   198 0.03223124 0.2143440 0.02468343 0.0010437758 0.02756062
## 199   199 0.03222885 0.2144447 0.02468117 0.0010448083 0.02760357
## 200   200 0.03223026 0.2143780 0.02468289 0.0010481938 0.02758377
## 201   201 0.03222937 0.2144093 0.02468108 0.0010476185 0.02764257
## 202   202 0.03223170 0.2143143 0.02468308 0.0010448119 0.02759767
## 203   203 0.03222986 0.2143986 0.02468219 0.0010429669 0.02758904
## 204   204 0.03223128 0.2143422 0.02468494 0.0010437813 0.02748991
## 205   205 0.03223160 0.2143291 0.02468513 0.0010444590 0.02757676
## 206   206 0.03223155 0.2143227 0.02468628 0.0010464766 0.02756180
## 207   207 0.03223266 0.2142757 0.02468721 0.0010461687 0.02757659
## 208   208 0.03223255 0.2142844 0.02468626 0.0010455797 0.02765948
## 209   209 0.03223383 0.2142319 0.02468739 0.0010453155 0.02759553
## 210   210 0.03223324 0.2142530 0.02468755 0.0010457994 0.02754280
## 211   211 0.03223372 0.2142303 0.02468854 0.0010472969 0.02749382
## 212   212 0.03223316 0.2142630 0.02468811 0.0010460692 0.02753992
## 213   213 0.03223363 0.2142432 0.02468894 0.0010461425 0.02750597
## 214   214 0.03223389 0.2142325 0.02468867 0.0010437209 0.02746146
## 215   215 0.03223403 0.2142238 0.02468962 0.0010456569 0.02733998
## 216   216 0.03223412 0.2142200 0.02469021 0.0010437127 0.02725333
## 217   217 0.03223395 0.2142295 0.02468961 0.0010437833 0.02725264
## 218   218 0.03223496 0.2141836 0.02469094 0.0010449065 0.02728730
## 219   219 0.03223496 0.2141848 0.02469090 0.0010438134 0.02724923
## 220   220 0.03223448 0.2142101 0.02469057 0.0010435869 0.02722076
## 221   221 0.03223504 0.2141853 0.02469117 0.0010445416 0.02720648
## 222   222 0.03223526 0.2141694 0.02469290 0.0010456283 0.02720763
## 223   223 0.03223507 0.2141796 0.02469298 0.0010452188 0.02726817
## 224   224 0.03223452 0.2142026 0.02469286 0.0010448990 0.02725018
## 225   225 0.03223502 0.2141819 0.02469298 0.0010445208 0.02725529
## 226   226 0.03223463 0.2141981 0.02469295 0.0010447328 0.02723686
## 227   227 0.03223413 0.2142199 0.02469274 0.0010449003 0.02723661
## 228   228 0.03223320 0.2142582 0.02469156 0.0010457992 0.02721714
## 229   229 0.03223319 0.2142607 0.02469212 0.0010442812 0.02721419
## 230   230 0.03223344 0.2142479 0.02469235 0.0010444791 0.02720711
## 231   231 0.03223373 0.2142371 0.02469240 0.0010450460 0.02721503
## 232   232 0.03223404 0.2142241 0.02469243 0.0010453876 0.02723733
## 233   233 0.03223429 0.2142132 0.02469274 0.0010456495 0.02726316
## 234   234 0.03223392 0.2142269 0.02469274 0.0010457886 0.02725864
## 235   235 0.03223414 0.2142182 0.02469299 0.0010459490 0.02725502
## 236   236 0.03223406 0.2142203 0.02469294 0.0010461860 0.02728085
## 237   237 0.03223403 0.2142210 0.02469284 0.0010459806 0.02725711
## 238   238 0.03223436 0.2142079 0.02469323 0.0010461352 0.02727336
## 239   239 0.03223419 0.2142160 0.02469319 0.0010460914 0.02727348
## 240   240 0.03223413 0.2142186 0.02469321 0.0010461107 0.02727501
##            MAESD
## 1   0.0006039987
## 2   0.0005560515
## 3   0.0005161705
## 4   0.0005030416
## 5   0.0004516171
## 6   0.0004274403
## 7   0.0004386333
## 8   0.0004116784
## 9   0.0004027648
## 10  0.0003804190
## 11  0.0003827175
## 12  0.0003714371
## 13  0.0003718175
## 14  0.0003523685
## 15  0.0003596036
## 16  0.0003688301
## 17  0.0003957368
## 18  0.0004141335
## 19  0.0003935137
## 20  0.0004296221
## 21  0.0004323057
## 22  0.0004296525
## 23  0.0004172385
## 24  0.0004122492
## 25  0.0004154358
## 26  0.0004009508
## 27  0.0003980728
## 28  0.0004058109
## 29  0.0004167466
## 30  0.0004250386
## 31  0.0004337700
## 32  0.0004301885
## 33  0.0004471771
## 34  0.0004379250
## 35  0.0004411262
## 36  0.0004363331
## 37  0.0004392575
## 38  0.0004565744
## 39  0.0004661210
## 40  0.0004693537
## 41  0.0004759417
## 42  0.0004995609
## 43  0.0004991563
## 44  0.0004921434
## 45  0.0004823461
## 46  0.0004753556
## 47  0.0004692500
## 48  0.0004614746
## 49  0.0004599378
## 50  0.0004511619
## 51  0.0004427788
## 52  0.0004630591
## 53  0.0004476089
## 54  0.0004607557
## 55  0.0004626769
## 56  0.0004743938
## 57  0.0004682956
## 58  0.0004556125
## 59  0.0004582658
## 60  0.0004496977
## 61  0.0004355180
## 62  0.0004499623
## 63  0.0004377042
## 64  0.0004329507
## 65  0.0004487659
## 66  0.0004504556
## 67  0.0004448734
## 68  0.0004467243
## 69  0.0004533267
## 70  0.0004525668
## 71  0.0004500066
## 72  0.0004595261
## 73  0.0004659523
## 74  0.0004633336
## 75  0.0004738190
## 76  0.0004710438
## 77  0.0004553563
## 78  0.0004534410
## 79  0.0004623091
## 80  0.0004607528
## 81  0.0004629932
## 82  0.0004774089
## 83  0.0004692723
## 84  0.0004702074
## 85  0.0004685357
## 86  0.0004747136
## 87  0.0004754254
## 88  0.0004656919
## 89  0.0004571532
## 90  0.0004593809
## 91  0.0004576292
## 92  0.0004618782
## 93  0.0004665940
## 94  0.0004564907
## 95  0.0004527557
## 96  0.0004523501
## 97  0.0004464795
## 98  0.0004492267
## 99  0.0004541808
## 100 0.0004595979
## 101 0.0004568488
## 102 0.0004527737
## 103 0.0004615426
## 104 0.0004633774
## 105 0.0004661702
## 106 0.0004698064
## 107 0.0004620997
## 108 0.0004626297
## 109 0.0004563712
## 110 0.0004538615
## 111 0.0004472959
## 112 0.0004384031
## 113 0.0004418254
## 114 0.0004387025
## 115 0.0004387272
## 116 0.0004413632
## 117 0.0004367245
## 118 0.0004382542
## 119 0.0004441669
## 120 0.0004527998
## 121 0.0004540134
## 122 0.0004554744
## 123 0.0004482638
## 124 0.0004414662
## 125 0.0004403732
## 126 0.0004422014
## 127 0.0004372368
## 128 0.0004360230
## 129 0.0004394743
## 130 0.0004444777
## 131 0.0004498589
## 132 0.0004582883
## 133 0.0004528263
## 134 0.0004535245
## 135 0.0004529470
## 136 0.0004572064
## 137 0.0004618735
## 138 0.0004573353
## 139 0.0004604980
## 140 0.0004581574
## 141 0.0004556408
## 142 0.0004567445
## 143 0.0004501770
## 144 0.0004459109
## 145 0.0004480937
## 146 0.0004480417
## 147 0.0004480812
## 148 0.0004488093
## 149 0.0004564846
## 150 0.0004520880
## 151 0.0004547858
## 152 0.0004551548
## 153 0.0004606019
## 154 0.0004596377
## 155 0.0004564784
## 156 0.0004512367
## 157 0.0004489165
## 158 0.0004444062
## 159 0.0004537700
## 160 0.0004556477
## 161 0.0004569324
## 162 0.0004517880
## 163 0.0004559000
## 164 0.0004492941
## 165 0.0004476347
## 166 0.0004410134
## 167 0.0004458714
## 168 0.0004448503
## 169 0.0004505409
## 170 0.0004539409
## 171 0.0004526260
## 172 0.0004546233
## 173 0.0004514684
## 174 0.0004536435
## 175 0.0004531830
## 176 0.0004530818
## 177 0.0004552330
## 178 0.0004535006
## 179 0.0004542633
## 180 0.0004550379
## 181 0.0004552068
## 182 0.0004571177
## 183 0.0004592029
## 184 0.0004650437
## 185 0.0004668034
## 186 0.0004672555
## 187 0.0004682015
## 188 0.0004653387
## 189 0.0004674643
## 190 0.0004709530
## 191 0.0004718856
## 192 0.0004701853
## 193 0.0004715426
## 194 0.0004731784
## 195 0.0004751147
## 196 0.0004755968
## 197 0.0004751587
## 198 0.0004746165
## 199 0.0004743917
## 200 0.0004756202
## 201 0.0004749317
## 202 0.0004731103
## 203 0.0004717088
## 204 0.0004715263
## 205 0.0004730446
## 206 0.0004730972
## 207 0.0004723421
## 208 0.0004738401
## 209 0.0004727704
## 210 0.0004727383
## 211 0.0004726466
## 212 0.0004713520
## 213 0.0004704777
## 214 0.0004683633
## 215 0.0004682378
## 216 0.0004668158
## 217 0.0004658348
## 218 0.0004661913
## 219 0.0004653234
## 220 0.0004653834
## 221 0.0004657934
## 222 0.0004665774
## 223 0.0004669865
## 224 0.0004666995
## 225 0.0004664618
## 226 0.0004665780
## 227 0.0004670497
## 228 0.0004673052
## 229 0.0004657645
## 230 0.0004659874
## 231 0.0004664695
## 232 0.0004670043
## 233 0.0004673417
## 234 0.0004674003
## 235 0.0004671664
## 236 0.0004674086
## 237 0.0004670385
## 238 0.0004674181
## 239 0.0004674932
## 240 0.0004675061
##   nvmax
## 7     7

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x9           x17 
##  2.006915e+00 -4.541413e-05  1.108905e-02  3.535570e-03  1.516178e-03 
##        stat98       stat110      sqrt.x18 
##  3.579154e-03 -3.275780e-03  2.665541e-02

Test

# Evaluate the CV forward-selection model (leapForward) on the hold-out set.
if (algo.forward.caret == TRUE){
    # NOTE(review): `transformation = t` — if no object `t` is defined earlier
    # in the file, this passes base R's transpose function t(); confirm this is
    # the intended transformation argument (e.g. the log/normalize setting).
    test.model(model.forward, data.test
             ,method = 'leapForward',subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.041   2.084   2.097   2.096   2.109   2.142 
## [1] "leapForward  Test MSE: 0.00103851731581386"

Forward Selection with CV (w/ filtered train)

Train

# Forward selection via caret (leapForward) with CV on the *filtered* training
# set (data.train2).
# NOTE(review): this reassigns `model.forward` and `id`, clobbering the
# full-train caret fit above; the non-caret variant stored its filtered fit as
# `model.forward2`. Confirm the overwrite is intentional before reusing the
# full-train model downstream.
if (algo.forward.caret == TRUE){
  set.seed(1)
  returned = train.caret.glmselect(formula = formula
                                   ,data =  data.train2
                                   ,method =  "leapForward"
                                   ,feature.names = feature.names)
  model.forward = returned$model
  id = returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 13 on full training set
##     nvmax       RMSE  Rsquared        MAE       RMSESD RsquaredSD
## 1       1 0.02888038 0.1564097 0.02335547 0.0006267618 0.02633141
## 2       2 0.02766787 0.2253658 0.02249776 0.0006867294 0.02103652
## 3       3 0.02710230 0.2568330 0.02192148 0.0008105304 0.02659149
## 4       4 0.02638810 0.2952697 0.02113655 0.0007311698 0.02534274
## 5       5 0.02604900 0.3130434 0.02090228 0.0006875295 0.02296316
## 6       6 0.02593917 0.3187854 0.02084831 0.0006494856 0.02073056
## 7       7 0.02589964 0.3209157 0.02084970 0.0006015737 0.02042737
## 8       8 0.02582820 0.3246426 0.02082087 0.0005550767 0.02247168
## 9       9 0.02579308 0.3265747 0.02082103 0.0005507879 0.02199516
## 10     10 0.02571588 0.3305729 0.02078122 0.0005582145 0.02228794
## 11     11 0.02568025 0.3325064 0.02074574 0.0005288941 0.02225932
## 12     12 0.02567812 0.3326191 0.02075803 0.0005158039 0.02239029
## 13     13 0.02564510 0.3343325 0.02073002 0.0005334347 0.02150174
## 14     14 0.02565553 0.3337725 0.02074496 0.0005376892 0.02133207
## 15     15 0.02566769 0.3331426 0.02075283 0.0005322738 0.02121877
## 16     16 0.02565106 0.3339861 0.02074031 0.0005400159 0.02232892
## 17     17 0.02565684 0.3337038 0.02075486 0.0005349747 0.02244334
## 18     18 0.02569034 0.3320320 0.02077927 0.0005175302 0.02160943
## 19     19 0.02569389 0.3318903 0.02078282 0.0005137277 0.02141399
## 20     20 0.02573786 0.3296690 0.02082724 0.0005226936 0.02185297
## 21     21 0.02573791 0.3297124 0.02083071 0.0005388669 0.02248823
## 22     22 0.02573041 0.3301518 0.02080928 0.0005332016 0.02255091
## 23     23 0.02574350 0.3295116 0.02081582 0.0004973838 0.02215473
## 24     24 0.02573690 0.3298500 0.02081135 0.0005095199 0.02236121
## 25     25 0.02573415 0.3300139 0.02080063 0.0005122607 0.02223397
## 26     26 0.02572575 0.3304568 0.02079660 0.0005218498 0.02297961
## 27     27 0.02571852 0.3308000 0.02078553 0.0005223274 0.02373763
## 28     28 0.02571385 0.3310634 0.02077578 0.0005157453 0.02280322
## 29     29 0.02571815 0.3308878 0.02077422 0.0005134859 0.02268174
## 30     30 0.02569992 0.3317881 0.02076354 0.0004990451 0.02220654
## 31     31 0.02568678 0.3324897 0.02075678 0.0005247647 0.02329734
## 32     32 0.02567728 0.3329797 0.02073529 0.0005156593 0.02228844
## 33     33 0.02568738 0.3324651 0.02074242 0.0005131801 0.02198079
## 34     34 0.02566987 0.3333285 0.02073632 0.0005119259 0.02166104
## 35     35 0.02568202 0.3327907 0.02074606 0.0005088787 0.02212313
## 36     36 0.02569147 0.3323278 0.02074950 0.0005057042 0.02282192
## 37     37 0.02568064 0.3328777 0.02074211 0.0005095204 0.02173139
## 38     38 0.02567586 0.3331335 0.02074376 0.0005165740 0.02174346
## 39     39 0.02568240 0.3327828 0.02074372 0.0005246375 0.02167826
## 40     40 0.02567495 0.3331743 0.02073701 0.0005299796 0.02227179
## 41     41 0.02568006 0.3329868 0.02073915 0.0005349256 0.02252003
## 42     42 0.02567453 0.3332758 0.02073374 0.0005294200 0.02308932
## 43     43 0.02566591 0.3336973 0.02073920 0.0005438494 0.02320727
## 44     44 0.02565431 0.3343120 0.02073307 0.0005461292 0.02299408
## 45     45 0.02567237 0.3334511 0.02074824 0.0005532594 0.02303382
## 46     46 0.02567381 0.3334262 0.02074910 0.0005682394 0.02375835
## 47     47 0.02568155 0.3330864 0.02075758 0.0005586737 0.02338164
## 48     48 0.02569824 0.3322812 0.02077519 0.0005546817 0.02418786
## 49     49 0.02570104 0.3321487 0.02077400 0.0005680431 0.02461376
## 50     50 0.02571142 0.3316351 0.02078885 0.0005804232 0.02489882
## 51     51 0.02571442 0.3314867 0.02079844 0.0005859051 0.02498729
## 52     52 0.02570643 0.3318926 0.02078968 0.0005651142 0.02448059
## 53     53 0.02570448 0.3319985 0.02079594 0.0005647993 0.02458664
## 54     54 0.02570008 0.3322451 0.02079083 0.0005623948 0.02488725
## 55     55 0.02570571 0.3320055 0.02078647 0.0005615709 0.02525683
## 56     56 0.02571157 0.3317780 0.02078676 0.0005623583 0.02572261
## 57     57 0.02571958 0.3314396 0.02079727 0.0005763937 0.02603417
## 58     58 0.02570665 0.3320842 0.02078197 0.0005840367 0.02614432
## 59     59 0.02571344 0.3317911 0.02078047 0.0005840275 0.02602833
## 60     60 0.02571848 0.3315404 0.02078966 0.0005814968 0.02572808
## 61     61 0.02573274 0.3308496 0.02080515 0.0005755032 0.02528611
## 62     62 0.02573031 0.3309954 0.02080132 0.0005885288 0.02580707
## 63     63 0.02573244 0.3309269 0.02080611 0.0005808763 0.02533706
## 64     64 0.02573647 0.3307713 0.02081634 0.0005789997 0.02519088
## 65     65 0.02574425 0.3304328 0.02082181 0.0005739618 0.02508796
## 66     66 0.02574452 0.3304191 0.02082541 0.0005848907 0.02508101
## 67     67 0.02575689 0.3298633 0.02083514 0.0006033778 0.02554772
## 68     68 0.02576289 0.3295965 0.02084398 0.0006039602 0.02518274
## 69     69 0.02577383 0.3291115 0.02085838 0.0006057775 0.02511462
## 70     70 0.02576371 0.3296298 0.02084878 0.0006158543 0.02526022
## 71     71 0.02576546 0.3295439 0.02084936 0.0006068938 0.02467139
## 72     72 0.02578122 0.3287644 0.02085646 0.0006157291 0.02503141
## 73     73 0.02579290 0.3282137 0.02086260 0.0006035951 0.02473353
## 74     74 0.02580101 0.3278441 0.02087103 0.0006123236 0.02470564
## 75     75 0.02580792 0.3274803 0.02087341 0.0006075667 0.02466853
## 76     76 0.02582609 0.3265720 0.02089007 0.0006111937 0.02455794
## 77     77 0.02583220 0.3262776 0.02089593 0.0006164874 0.02470749
## 78     78 0.02582887 0.3264494 0.02089038 0.0006141500 0.02486334
## 79     79 0.02583269 0.3262365 0.02089166 0.0006053101 0.02421727
## 80     80 0.02583528 0.3261101 0.02089238 0.0005960557 0.02373935
## 81     81 0.02584918 0.3254403 0.02090405 0.0005922022 0.02334219
## 82     82 0.02585329 0.3252721 0.02090699 0.0005926366 0.02343379
## 83     83 0.02585577 0.3252160 0.02090867 0.0005908288 0.02363335
## 84     84 0.02584808 0.3255843 0.02089844 0.0005827252 0.02336556
## 85     85 0.02585037 0.3254972 0.02089943 0.0005890078 0.02333031
## 86     86 0.02583862 0.3261045 0.02089142 0.0005945725 0.02382155
## 87     87 0.02584607 0.3257735 0.02089989 0.0005958183 0.02354021
## 88     88 0.02583926 0.3261322 0.02089671 0.0005942065 0.02348262
## 89     89 0.02583517 0.3263434 0.02089851 0.0005883128 0.02316163
## 90     90 0.02583292 0.3265166 0.02089475 0.0005822011 0.02311729
## 91     91 0.02583526 0.3264475 0.02090029 0.0005773574 0.02291823
## 92     92 0.02583102 0.3266078 0.02089689 0.0005790033 0.02278655
## 93     93 0.02582694 0.3267871 0.02089638 0.0005811920 0.02294305
## 94     94 0.02582753 0.3267742 0.02089902 0.0005810938 0.02271601
## 95     95 0.02582751 0.3267936 0.02090221 0.0005762862 0.02249956
## 96     96 0.02583250 0.3265290 0.02091264 0.0005819996 0.02284583
## 97     97 0.02582775 0.3267690 0.02090785 0.0005853393 0.02297981
## 98     98 0.02582920 0.3267016 0.02091045 0.0005719549 0.02301537
## 99     99 0.02582630 0.3268618 0.02090708 0.0005776203 0.02286541
## 100   100 0.02582663 0.3268332 0.02090802 0.0005715448 0.02266071
## 101   101 0.02582455 0.3269171 0.02089884 0.0005664844 0.02284631
## 102   102 0.02582768 0.3267794 0.02089932 0.0005735925 0.02309395
## 103   103 0.02582971 0.3267084 0.02089949 0.0005805433 0.02326023
## 104   104 0.02582058 0.3271586 0.02089195 0.0005733599 0.02308826
## 105   105 0.02581718 0.3273512 0.02088530 0.0005765757 0.02280133
## 106   106 0.02582280 0.3270696 0.02089589 0.0005817636 0.02305462
## 107   107 0.02581921 0.3272573 0.02089657 0.0005846524 0.02323672
## 108   108 0.02582004 0.3272680 0.02089951 0.0005913568 0.02373511
## 109   109 0.02582519 0.3270261 0.02091109 0.0005903359 0.02371377
## 110   110 0.02582233 0.3271437 0.02091073 0.0005920933 0.02357317
## 111   111 0.02581927 0.3272747 0.02090373 0.0005907401 0.02340704
## 112   112 0.02581525 0.3274459 0.02090770 0.0005939706 0.02301589
## 113   113 0.02580944 0.3277347 0.02090535 0.0005915893 0.02304532
## 114   114 0.02580411 0.3280406 0.02090793 0.0005925728 0.02316538
## 115   115 0.02580319 0.3280795 0.02090210 0.0005887298 0.02323966
## 116   116 0.02580665 0.3279118 0.02090360 0.0005895227 0.02326606
## 117   117 0.02580821 0.3278529 0.02090678 0.0005889767 0.02315982
## 118   118 0.02581345 0.3275854 0.02091063 0.0005901964 0.02311294
## 119   119 0.02581204 0.3276748 0.02090903 0.0005904491 0.02308205
## 120   120 0.02581219 0.3276579 0.02090637 0.0005926569 0.02312392
## 121   121 0.02581338 0.3276144 0.02090873 0.0005951203 0.02310454
## 122   122 0.02581695 0.3274764 0.02091197 0.0005987173 0.02323650
## 123   123 0.02581999 0.3273198 0.02090872 0.0005971357 0.02324535
## 124   124 0.02581774 0.3274335 0.02090824 0.0005972078 0.02291369
## 125   125 0.02582471 0.3270920 0.02090976 0.0005890575 0.02272593
## 126   126 0.02582092 0.3272666 0.02090829 0.0005934350 0.02305902
## 127   127 0.02582169 0.3272288 0.02090613 0.0005938942 0.02284735
## 128   128 0.02581330 0.3276200 0.02089985 0.0005998121 0.02324178
## 129   129 0.02582012 0.3272924 0.02090335 0.0006012107 0.02325979
## 130   130 0.02582201 0.3272052 0.02090716 0.0005946930 0.02299736
## 131   131 0.02582699 0.3269543 0.02090727 0.0005978385 0.02338814
## 132   132 0.02582214 0.3271944 0.02090352 0.0005927339 0.02326222
## 133   133 0.02582100 0.3272537 0.02090416 0.0005928219 0.02333666
## 134   134 0.02582074 0.3272742 0.02090326 0.0005925040 0.02323256
## 135   135 0.02581657 0.3274721 0.02089826 0.0005958984 0.02320979
## 136   136 0.02581141 0.3277214 0.02089217 0.0005951126 0.02321542
## 137   137 0.02580408 0.3280789 0.02088594 0.0006000015 0.02331117
## 138   138 0.02580074 0.3282591 0.02088132 0.0005991602 0.02318450
## 139   139 0.02580703 0.3279597 0.02088924 0.0005971830 0.02338305
## 140   140 0.02580980 0.3278374 0.02089143 0.0006039282 0.02371603
## 141   141 0.02580528 0.3280559 0.02088707 0.0006072903 0.02391055
## 142   142 0.02580955 0.3278478 0.02088850 0.0006054965 0.02370681
## 143   143 0.02581039 0.3278084 0.02089034 0.0006118165 0.02402034
## 144   144 0.02581359 0.3276753 0.02089307 0.0006128366 0.02391777
## 145   145 0.02581080 0.3278048 0.02088765 0.0006124281 0.02396928
## 146   146 0.02581515 0.3276284 0.02088952 0.0006084623 0.02376371
## 147   147 0.02581454 0.3276622 0.02089061 0.0006089706 0.02357194
## 148   148 0.02580781 0.3280042 0.02088859 0.0006057809 0.02362373
## 149   149 0.02580926 0.3279519 0.02088819 0.0006062277 0.02364296
## 150   150 0.02581014 0.3279253 0.02088789 0.0006077092 0.02368739
## 151   151 0.02581010 0.3279386 0.02088987 0.0006089543 0.02361237
## 152   152 0.02580769 0.3280627 0.02088935 0.0006076643 0.02348084
## 153   153 0.02580895 0.3279921 0.02089211 0.0006056340 0.02346460
## 154   154 0.02580520 0.3281775 0.02088972 0.0006046600 0.02339837
## 155   155 0.02580032 0.3284219 0.02088549 0.0006030231 0.02298145
## 156   156 0.02580518 0.3281819 0.02088718 0.0005988795 0.02276936
## 157   157 0.02580260 0.3283220 0.02088633 0.0005989289 0.02264011
## 158   158 0.02579259 0.3288038 0.02087615 0.0005995867 0.02255705
## 159   159 0.02579416 0.3287176 0.02087901 0.0006050863 0.02276787
## 160   160 0.02579251 0.3288107 0.02087626 0.0006075554 0.02296789
## 161   161 0.02578891 0.3289929 0.02087096 0.0006061642 0.02322577
## 162   162 0.02579177 0.3288602 0.02087244 0.0005980397 0.02284203
## 163   163 0.02578948 0.3289531 0.02087084 0.0006006023 0.02301775
## 164   164 0.02578468 0.3291950 0.02086274 0.0006018943 0.02321438
## 165   165 0.02578301 0.3292692 0.02085999 0.0005985934 0.02327075
## 166   166 0.02578189 0.3293226 0.02085765 0.0005986537 0.02335741
## 167   167 0.02578231 0.3292977 0.02085907 0.0006019399 0.02362461
## 168   168 0.02578555 0.3291534 0.02086337 0.0005999028 0.02346599
## 169   169 0.02578217 0.3293112 0.02086372 0.0005956168 0.02318824
## 170   170 0.02578096 0.3293592 0.02086478 0.0005924658 0.02302482
## 171   171 0.02578322 0.3292567 0.02086646 0.0005898892 0.02282490
## 172   172 0.02578498 0.3291808 0.02086932 0.0005885822 0.02257354
## 173   173 0.02578130 0.3293460 0.02086624 0.0005908088 0.02265845
## 174   174 0.02578177 0.3293328 0.02086480 0.0005911418 0.02277908
## 175   175 0.02578007 0.3294202 0.02086364 0.0005935627 0.02298045
## 176   176 0.02578012 0.3294104 0.02086498 0.0005895189 0.02289113
## 177   177 0.02578131 0.3293480 0.02086651 0.0005867510 0.02275149
## 178   178 0.02578453 0.3291940 0.02087077 0.0005834808 0.02281978
## 179   179 0.02578768 0.3290485 0.02087457 0.0005827820 0.02282550
## 180   180 0.02579176 0.3288471 0.02087795 0.0005814744 0.02273010
## 181   181 0.02579371 0.3287641 0.02087851 0.0005793947 0.02256623
## 182   182 0.02579362 0.3287812 0.02088016 0.0005822530 0.02261135
## 183   183 0.02579220 0.3288587 0.02087981 0.0005813521 0.02249112
## 184   184 0.02579391 0.3287742 0.02088190 0.0005817407 0.02254340
## 185   185 0.02579152 0.3288881 0.02087895 0.0005823267 0.02250522
## 186   186 0.02578965 0.3289754 0.02087811 0.0005834626 0.02248278
## 187   187 0.02579130 0.3289014 0.02087878 0.0005867068 0.02251575
## 188   188 0.02579301 0.3288260 0.02087891 0.0005862012 0.02250843
## 189   189 0.02579465 0.3287608 0.02088010 0.0005875391 0.02252813
## 190   190 0.02579247 0.3288567 0.02087889 0.0005868492 0.02248393
## 191   191 0.02579106 0.3289247 0.02087703 0.0005855321 0.02245265
## 192   192 0.02578817 0.3290553 0.02087490 0.0005806270 0.02227197
## 193   193 0.02578708 0.3291143 0.02087262 0.0005804337 0.02224749
## 194   194 0.02578869 0.3290309 0.02087353 0.0005810898 0.02233322
## 195   195 0.02579034 0.3289574 0.02087409 0.0005803654 0.02236416
## 196   196 0.02579178 0.3288788 0.02087456 0.0005815995 0.02239959
## 197   197 0.02579055 0.3289510 0.02087359 0.0005827314 0.02249009
## 198   198 0.02579214 0.3288843 0.02087546 0.0005835827 0.02248298
## 199   199 0.02579062 0.3289560 0.02087402 0.0005844707 0.02249908
## 200   200 0.02579174 0.3289030 0.02087644 0.0005845183 0.02248883
## 201   201 0.02579304 0.3288390 0.02087639 0.0005836284 0.02239165
## 202   202 0.02579276 0.3288520 0.02087631 0.0005837610 0.02233758
## 203   203 0.02579404 0.3287930 0.02087901 0.0005829146 0.02223242
## 204   204 0.02579558 0.3287190 0.02087922 0.0005834188 0.02228375
## 205   205 0.02579772 0.3286178 0.02088020 0.0005823138 0.02219107
## 206   206 0.02579892 0.3285654 0.02088128 0.0005809090 0.02223226
## 207   207 0.02579867 0.3285809 0.02088136 0.0005844181 0.02233892
## 208   208 0.02579890 0.3285743 0.02088098 0.0005839143 0.02231316
## 209   209 0.02579878 0.3285812 0.02088050 0.0005844861 0.02235314
## 210   210 0.02579904 0.3285756 0.02088019 0.0005868091 0.02241808
## 211   211 0.02579805 0.3286224 0.02087892 0.0005869460 0.02241573
## 212   212 0.02579992 0.3285328 0.02088027 0.0005853128 0.02238425
## 213   213 0.02580126 0.3284724 0.02088208 0.0005854671 0.02239519
## 214   214 0.02580049 0.3285105 0.02088185 0.0005856100 0.02240101
## 215   215 0.02580033 0.3285163 0.02088065 0.0005867739 0.02248883
## 216   216 0.02579989 0.3285411 0.02088081 0.0005862293 0.02247018
## 217   217 0.02580050 0.3285152 0.02088170 0.0005873485 0.02252026
## 218   218 0.02580119 0.3284856 0.02088152 0.0005879841 0.02253500
## 219   219 0.02580166 0.3284622 0.02088203 0.0005889135 0.02251416
## 220   220 0.02580203 0.3284464 0.02088195 0.0005886014 0.02248331
## 221   221 0.02580261 0.3284173 0.02088168 0.0005880471 0.02244028
## 222   222 0.02580418 0.3283414 0.02088338 0.0005884707 0.02243783
## 223   223 0.02580430 0.3283402 0.02088385 0.0005894817 0.02246021
## 224   224 0.02580460 0.3283266 0.02088451 0.0005893404 0.02246173
## 225   225 0.02580541 0.3282875 0.02088516 0.0005894300 0.02242379
## 226   226 0.02580578 0.3282686 0.02088568 0.0005898879 0.02243287
## 227   227 0.02580618 0.3282490 0.02088577 0.0005892798 0.02239710
## 228   228 0.02580530 0.3282953 0.02088497 0.0005896620 0.02240748
## 229   229 0.02580519 0.3282996 0.02088502 0.0005895789 0.02242132
## 230   230 0.02580500 0.3283086 0.02088452 0.0005890612 0.02242948
## 231   231 0.02580437 0.3283363 0.02088425 0.0005887867 0.02240553
## 232   232 0.02580456 0.3283292 0.02088410 0.0005887680 0.02239511
## 233   233 0.02580418 0.3283463 0.02088389 0.0005892820 0.02241764
## 234   234 0.02580336 0.3283860 0.02088309 0.0005897805 0.02244912
## 235   235 0.02580288 0.3284094 0.02088269 0.0005892806 0.02243559
## 236   236 0.02580313 0.3283991 0.02088306 0.0005894619 0.02244298
## 237   237 0.02580313 0.3283989 0.02088291 0.0005895992 0.02245239
## 238   238 0.02580317 0.3283959 0.02088292 0.0005895545 0.02245788
## 239   239 0.02580340 0.3283851 0.02088304 0.0005896238 0.02245255
## 240   240 0.02580348 0.3283813 0.02088310 0.0005896638 0.02245074
##            MAESD
## 1   0.0004604171
## 2   0.0005295821
## 3   0.0005663676
## 4   0.0004831073
## 5   0.0004993662
## 6   0.0005070940
## 7   0.0004687843
## 8   0.0004502107
## 9   0.0004598547
## 10  0.0004621940
## 11  0.0004355906
## 12  0.0004338364
## 13  0.0004558980
## 14  0.0004441391
## 15  0.0004372342
## 16  0.0004432195
## 17  0.0004404419
## 18  0.0004403509
## 19  0.0004201433
## 20  0.0004166317
## 21  0.0004434090
## 22  0.0004507171
## 23  0.0004294308
## 24  0.0004461363
## 25  0.0004578300
## 26  0.0004739148
## 27  0.0004805325
## 28  0.0004711278
## 29  0.0004699987
## 30  0.0004448160
## 31  0.0004578997
## 32  0.0004497970
## 33  0.0004600843
## 34  0.0004645457
## 35  0.0004617882
## 36  0.0004560016
## 37  0.0004570345
## 38  0.0004657082
## 39  0.0004722526
## 40  0.0004838955
## 41  0.0004937431
## 42  0.0004851511
## 43  0.0005010650
## 44  0.0005048055
## 45  0.0005220898
## 46  0.0005348349
## 47  0.0005278490
## 48  0.0005193737
## 49  0.0005293235
## 50  0.0005574183
## 51  0.0005669865
## 52  0.0005470428
## 53  0.0005525746
## 54  0.0005405494
## 55  0.0005409315
## 56  0.0005491505
## 57  0.0005552629
## 58  0.0005624205
## 59  0.0005609408
## 60  0.0005593707
## 61  0.0005601565
## 62  0.0005656455
## 63  0.0005599612
## 64  0.0005586633
## 65  0.0005635173
## 66  0.0005755445
## 67  0.0005909006
## 68  0.0005861716
## 69  0.0005941534
## 70  0.0006051794
## 71  0.0005950312
## 72  0.0006112488
## 73  0.0006016017
## 74  0.0005990323
## 75  0.0005915225
## 76  0.0005951866
## 77  0.0006113831
## 78  0.0006065219
## 79  0.0005985225
## 80  0.0005903204
## 81  0.0005944194
## 82  0.0005981555
## 83  0.0005983609
## 84  0.0005913369
## 85  0.0005916722
## 86  0.0005836075
## 87  0.0005867378
## 88  0.0005856735
## 89  0.0005788857
## 90  0.0005723771
## 91  0.0005748425
## 92  0.0005725895
## 93  0.0005738091
## 94  0.0005674578
## 95  0.0005634736
## 96  0.0005723432
## 97  0.0005703462
## 98  0.0005583679
## 99  0.0005680595
## 100 0.0005648277
## 101 0.0005598380
## 102 0.0005716468
## 103 0.0005756319
## 104 0.0005663924
## 105 0.0005683870
## 106 0.0005781467
## 107 0.0005808023
## 108 0.0005848309
## 109 0.0005856044
## 110 0.0005938172
## 111 0.0005926137
## 112 0.0005918957
## 113 0.0005904336
## 114 0.0005895499
## 115 0.0005894774
## 116 0.0005883033
## 117 0.0005883101
## 118 0.0005840972
## 119 0.0005814825
## 120 0.0005810610
## 121 0.0005832566
## 122 0.0005857325
## 123 0.0005829023
## 124 0.0005890280
## 125 0.0005812478
## 126 0.0005848219
## 127 0.0005807152
## 128 0.0005824537
## 129 0.0005792415
## 130 0.0005756383
## 131 0.0005792144
## 132 0.0005797011
## 133 0.0005805971
## 134 0.0005870293
## 135 0.0005873961
## 136 0.0005915482
## 137 0.0005962929
## 138 0.0005897653
## 139 0.0005822294
## 140 0.0005854431
## 141 0.0005871587
## 142 0.0005875907
## 143 0.0005938466
## 144 0.0005966136
## 145 0.0005951390
## 146 0.0005935034
## 147 0.0005982994
## 148 0.0005957568
## 149 0.0005961702
## 150 0.0005937219
## 151 0.0005951351
## 152 0.0005945673
## 153 0.0005929232
## 154 0.0005909723
## 155 0.0005929431
## 156 0.0005893438
## 157 0.0005883026
## 158 0.0005899555
## 159 0.0005937603
## 160 0.0005917288
## 161 0.0005940671
## 162 0.0005861211
## 163 0.0005876271
## 164 0.0005873892
## 165 0.0005883226
## 166 0.0005886323
## 167 0.0005897915
## 168 0.0005871572
## 169 0.0005858670
## 170 0.0005822736
## 171 0.0005785971
## 172 0.0005809589
## 173 0.0005803280
## 174 0.0005813352
## 175 0.0005830233
## 176 0.0005801820
## 177 0.0005751898
## 178 0.0005727400
## 179 0.0005732457
## 180 0.0005758422
## 181 0.0005741822
## 182 0.0005754254
## 183 0.0005736250
## 184 0.0005736178
## 185 0.0005763587
## 186 0.0005790358
## 187 0.0005801744
## 188 0.0005793279
## 189 0.0005817473
## 190 0.0005808045
## 191 0.0005793380
## 192 0.0005764986
## 193 0.0005751428
## 194 0.0005751462
## 195 0.0005757867
## 196 0.0005770206
## 197 0.0005790611
## 198 0.0005795640
## 199 0.0005798079
## 200 0.0005814693
## 201 0.0005807335
## 202 0.0005808919
## 203 0.0005801940
## 204 0.0005819659
## 205 0.0005815627
## 206 0.0005808254
## 207 0.0005846257
## 208 0.0005848515
## 209 0.0005862627
## 210 0.0005877891
## 211 0.0005883912
## 212 0.0005875535
## 213 0.0005886005
## 214 0.0005868758
## 215 0.0005884643
## 216 0.0005874744
## 217 0.0005887474
## 218 0.0005902757
## 219 0.0005910193
## 220 0.0005901838
## 221 0.0005899732
## 222 0.0005903717
## 223 0.0005912456
## 224 0.0005909634
## 225 0.0005908327
## 226 0.0005912975
## 227 0.0005905947
## 228 0.0005909874
## 229 0.0005907499
## 230 0.0005903754
## 231 0.0005900148
## 232 0.0005902183
## 233 0.0005905081
## 234 0.0005910302
## 235 0.0005905733
## 236 0.0005909074
## 237 0.0005907941
## 238 0.0005905927
## 239 0.0005905728
## 240 0.0005905793
##    nvmax
## 13    13

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  1.961247e+00 -5.186788e-05  1.234233e-02  5.954491e-04  3.245467e-03 
##           x10           x11           x16           x17        stat14 
##  1.445759e-03  2.412145e+05  8.242512e-04  1.494800e-03 -8.523881e-04 
##        stat23        stat98       stat110      sqrt.x18 
##  7.175290e-04  3.343452e-03 -3.289042e-03  2.672089e-02

Test

# Evaluate the caret forward-selection (leapForward) model on the held-out test
# set, printing a summary of predictions and the test MSE.
# isTRUE() is safer than `== TRUE`: it returns FALSE (instead of NA, which
# would crash the `if`) when the flag is NA or not a length-1 logical.
if (isTRUE(algo.forward.caret)) {
  test.model(model.forward, data.test,
             method = "leapForward", subopt = NULL,
             formula = formula, feature.names = feature.names,
             label.names = label.names,
             id = id,
             # NOTE(review): `t` here is presumably a transformation code set
             # earlier in the document — confirm it is not base::t (transpose).
             draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.035   2.081   2.093   2.093   2.106   2.145 
## [1] "leapForward  Test MSE: 0.00104891376260811"

Backward Elimination

Train

# Backward elimination via AIC-based stepwise search starting from the full
# model. This is slow with many predictors (hence the timing instrumentation).
if (isTRUE(algo.backward)) {
  t1 <- Sys.time()

  # trace = 0 suppresses the per-step AIC output from step().
  model.backward <- step(model.full, data = data.train,
                         direction = "backward", trace = 0)
  print(summary(model.backward))

  t2 <- Sys.time()
  # format() keeps the difftime units (secs/mins); bare paste() would coerce
  # the difftime to a unitless number.
  print(paste0("Time taken for Backward Elimination: ", format(t2 - t1)))

  # Residual / diagnostic plots for the selected model on the training data.
  plot.diagnostics(model.backward, data.train)
}

Test

# Evaluate the backward-elimination model on the held-out test set.
# BUG FIX: the original referenced `model.backard` (typo), an undefined object,
# so this chunk would fail with "object 'model.backard' not found" whenever
# algo.backward was TRUE. Corrected to `model.backward` as fitted above.
if (isTRUE(algo.backward)) {
  test.model(model.backward, data.test, "Backward Elimination")
}

Backward Elimination with CV (w/ full train)

Train

# Train backward feature selection (caret "leapBackward") with cross-validation
# on the full training set. The seed is fixed so the CV folds are reproducible.
if (algo.backward.caret == TRUE) {
  set.seed(1)
  fit <- train.caret.glmselect(
    formula       = formula,
    data          = data.train,
    method        = "leapBackward",
    feature.names = feature.names
  )
  # Expose the fitted model and run id for the downstream test chunk.
  model.backward <- fit$model
  id <- fit$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 21 on full training set
##     nvmax       RMSE  Rsquared        MAE       RMSESD RsquaredSD
## 1       1 0.03410610 0.1149630 0.02657577 0.0012861157 0.02115810
## 2       2 0.03328304 0.1574261 0.02582705 0.0011082962 0.02564652
## 3       3 0.03267992 0.1875344 0.02522129 0.0010042512 0.02555459
## 4       4 0.03218662 0.2115343 0.02451101 0.0010005851 0.02634272
## 5       5 0.03186632 0.2271994 0.02432100 0.0009679660 0.02729495
## 6       6 0.03185480 0.2277527 0.02432260 0.0009428484 0.02753486
## 7       7 0.03174204 0.2329557 0.02426652 0.0009365345 0.02626930
## 8       8 0.03177661 0.2313360 0.02430354 0.0009495410 0.02509086
## 9       9 0.03177342 0.2315882 0.02429654 0.0009401013 0.02649936
## 10     10 0.03174421 0.2330436 0.02428987 0.0009336330 0.02724717
## 11     11 0.03175240 0.2327719 0.02430959 0.0009426783 0.02703386
## 12     12 0.03174790 0.2330175 0.02429329 0.0009361445 0.02713821
## 13     13 0.03175640 0.2326937 0.02430729 0.0009210531 0.02750259
## 14     14 0.03174821 0.2331087 0.02430265 0.0008876305 0.02909070
## 15     15 0.03175781 0.2326158 0.02430769 0.0009302689 0.02811104
## 16     16 0.03177382 0.2318280 0.02431036 0.0009445884 0.02676660
## 17     17 0.03178911 0.2311487 0.02432604 0.0009658569 0.02635184
## 18     18 0.03177643 0.2317125 0.02431717 0.0009793686 0.02490692
## 19     19 0.03175964 0.2325262 0.02429739 0.0009456329 0.02627921
## 20     20 0.03175338 0.2328159 0.02429680 0.0009786020 0.02653806
## 21     21 0.03173935 0.2334905 0.02428508 0.0009840306 0.02545556
## 22     22 0.03175723 0.2326583 0.02429844 0.0009827134 0.02523360
## 23     23 0.03177324 0.2319769 0.02431065 0.0009664176 0.02500459
## 24     24 0.03178101 0.2316240 0.02431494 0.0009746416 0.02498553
## 25     25 0.03178794 0.2313409 0.02430940 0.0009701570 0.02535667
## 26     26 0.03178550 0.2315099 0.02430897 0.0009525656 0.02613484
## 27     27 0.03178503 0.2316293 0.02430735 0.0009302627 0.02636797
## 28     28 0.03179885 0.2310416 0.02432687 0.0009317546 0.02652259
## 29     29 0.03180255 0.2309000 0.02432941 0.0009380593 0.02679688
## 30     30 0.03180062 0.2309386 0.02432064 0.0009642642 0.02624096
## 31     31 0.03180474 0.2307492 0.02432622 0.0009667548 0.02704769
## 32     32 0.03182643 0.2298072 0.02434847 0.0009654710 0.02666004
## 33     33 0.03183422 0.2294918 0.02435875 0.0009880193 0.02646569
## 34     34 0.03184403 0.2290906 0.02437048 0.0009826630 0.02640569
## 35     35 0.03186398 0.2281715 0.02437777 0.0009848969 0.02491418
## 36     36 0.03188623 0.2271867 0.02439776 0.0009772063 0.02510442
## 37     37 0.03189688 0.2267396 0.02440640 0.0009848116 0.02451533
## 38     38 0.03189749 0.2267378 0.02440712 0.0009965136 0.02346400
## 39     39 0.03189772 0.2267426 0.02440672 0.0009965048 0.02350484
## 40     40 0.03188308 0.2274345 0.02440556 0.0009967017 0.02308491
## 41     41 0.03189219 0.2270984 0.02440958 0.0010122723 0.02296109
## 42     42 0.03191386 0.2261857 0.02443666 0.0010135977 0.02329331
## 43     43 0.03193363 0.2253295 0.02444919 0.0010093464 0.02326704
## 44     44 0.03193395 0.2253455 0.02445311 0.0010057347 0.02336262
## 45     45 0.03193616 0.2252877 0.02445102 0.0009973765 0.02400841
## 46     46 0.03193004 0.2256258 0.02444756 0.0009919646 0.02463915
## 47     47 0.03193271 0.2255070 0.02445240 0.0009802941 0.02474200
## 48     48 0.03194622 0.2249318 0.02445795 0.0009862503 0.02449970
## 49     49 0.03196465 0.2241268 0.02447800 0.0009803599 0.02479855
## 50     50 0.03195945 0.2244088 0.02447681 0.0009803458 0.02500255
## 51     51 0.03197023 0.2239340 0.02448702 0.0009887533 0.02492097
## 52     52 0.03196299 0.2242518 0.02448094 0.0010084516 0.02482320
## 53     53 0.03195243 0.2247351 0.02447428 0.0010218759 0.02491747
## 54     54 0.03195447 0.2246253 0.02446889 0.0010292820 0.02443268
## 55     55 0.03194900 0.2248975 0.02446582 0.0010272742 0.02435764
## 56     56 0.03196646 0.2241897 0.02448574 0.0010297665 0.02479090
## 57     57 0.03196967 0.2240847 0.02448937 0.0010115033 0.02518276
## 58     58 0.03197233 0.2240167 0.02449291 0.0010150578 0.02526220
## 59     59 0.03197418 0.2239440 0.02449685 0.0010111162 0.02557746
## 60     60 0.03197957 0.2237494 0.02450787 0.0010029230 0.02554756
## 61     61 0.03198569 0.2234916 0.02450371 0.0009835603 0.02614840
## 62     62 0.03199807 0.2229881 0.02450440 0.0009816007 0.02656110
## 63     63 0.03199817 0.2230134 0.02450527 0.0009905666 0.02637606
## 64     64 0.03200846 0.2225824 0.02451152 0.0009874893 0.02651540
## 65     65 0.03200332 0.2228327 0.02450448 0.0009808047 0.02673870
## 66     66 0.03200666 0.2226796 0.02451076 0.0009887496 0.02668168
## 67     67 0.03201699 0.2222241 0.02451680 0.0009906531 0.02598043
## 68     68 0.03201825 0.2222007 0.02451956 0.0009921786 0.02641773
## 69     69 0.03201962 0.2222185 0.02451188 0.0009913407 0.02686178
## 70     70 0.03203755 0.2214853 0.02452414 0.0009873443 0.02745757
## 71     71 0.03203259 0.2217402 0.02452795 0.0009952913 0.02736407
## 72     72 0.03204207 0.2213363 0.02453509 0.0009952307 0.02784528
## 73     73 0.03205438 0.2208065 0.02453749 0.0010010351 0.02782146
## 74     74 0.03205200 0.2209117 0.02453727 0.0009891835 0.02747961
## 75     75 0.03206266 0.2204415 0.02453910 0.0009876219 0.02745146
## 76     76 0.03206407 0.2204362 0.02453660 0.0009879116 0.02707874
## 77     77 0.03206083 0.2205941 0.02453477 0.0009846399 0.02737700
## 78     78 0.03207119 0.2201542 0.02454253 0.0009803147 0.02697466
## 79     79 0.03207536 0.2199857 0.02454560 0.0009776743 0.02712203
## 80     80 0.03207472 0.2200595 0.02454699 0.0009820153 0.02771544
## 81     81 0.03208023 0.2198302 0.02455382 0.0009905057 0.02763422
## 82     82 0.03208116 0.2198268 0.02455111 0.0009886707 0.02809662
## 83     83 0.03208485 0.2196365 0.02454795 0.0009946036 0.02777544
## 84     84 0.03208038 0.2198009 0.02454448 0.0009946813 0.02758717
## 85     85 0.03208472 0.2196182 0.02455277 0.0009938144 0.02799061
## 86     86 0.03208625 0.2195444 0.02455738 0.0009914175 0.02766385
## 87     87 0.03209925 0.2189983 0.02456521 0.0009825900 0.02791736
## 88     88 0.03209417 0.2192246 0.02456524 0.0009803226 0.02778533
## 89     89 0.03209894 0.2190303 0.02456644 0.0009754795 0.02787744
## 90     90 0.03209838 0.2191245 0.02456505 0.0009819499 0.02800324
## 91     91 0.03209825 0.2191558 0.02456419 0.0009833084 0.02778367
## 92     92 0.03210400 0.2189652 0.02456694 0.0009856553 0.02812231
## 93     93 0.03210434 0.2189801 0.02457084 0.0009801447 0.02823996
## 94     94 0.03210060 0.2191943 0.02456772 0.0009654844 0.02819118
## 95     95 0.03210529 0.2190246 0.02457198 0.0009636203 0.02831531
## 96     96 0.03210450 0.2190624 0.02457628 0.0009700206 0.02832323
## 97     97 0.03209898 0.2192976 0.02457105 0.0009694717 0.02822326
## 98     98 0.03210560 0.2190003 0.02457457 0.0009800695 0.02833946
## 99     99 0.03211869 0.2184357 0.02458472 0.0009773209 0.02789135
## 100   100 0.03212527 0.2181999 0.02458565 0.0009787023 0.02804229
## 101   101 0.03212285 0.2183577 0.02458330 0.0009766513 0.02831127
## 102   102 0.03212769 0.2181718 0.02458644 0.0009780267 0.02795472
## 103   103 0.03212853 0.2181778 0.02459237 0.0009802766 0.02799265
## 104   104 0.03213054 0.2181329 0.02459439 0.0009807890 0.02792203
## 105   105 0.03212531 0.2183903 0.02459002 0.0009831817 0.02779635
## 106   106 0.03212718 0.2183151 0.02458924 0.0009909548 0.02796117
## 107   107 0.03213148 0.2181438 0.02459588 0.0009866200 0.02784121
## 108   108 0.03212901 0.2182526 0.02459664 0.0009872206 0.02797870
## 109   109 0.03214069 0.2177782 0.02460351 0.0009937091 0.02772076
## 110   110 0.03214189 0.2177448 0.02460644 0.0009939408 0.02807672
## 111   111 0.03214224 0.2177197 0.02460623 0.0009903644 0.02810488
## 112   112 0.03214342 0.2176613 0.02460782 0.0009970808 0.02800171
## 113   113 0.03214129 0.2177585 0.02460478 0.0009923991 0.02814922
## 114   114 0.03214099 0.2177672 0.02460829 0.0009898204 0.02794168
## 115   115 0.03214447 0.2176380 0.02461003 0.0009822946 0.02809847
## 116   116 0.03214458 0.2176760 0.02461034 0.0009887367 0.02799482
## 117   117 0.03215664 0.2171590 0.02462468 0.0009890154 0.02781091
## 118   118 0.03216167 0.2169618 0.02462651 0.0009924178 0.02824895
## 119   119 0.03215972 0.2170612 0.02462358 0.0009974675 0.02829916
## 120   120 0.03216148 0.2169841 0.02462209 0.0010072158 0.02835169
## 121   121 0.03216677 0.2167314 0.02462646 0.0010086517 0.02820218
## 122   122 0.03217018 0.2165822 0.02462492 0.0009989982 0.02813177
## 123   123 0.03217773 0.2162525 0.02462955 0.0009983265 0.02778428
## 124   124 0.03218328 0.2160552 0.02463655 0.0009976690 0.02782595
## 125   125 0.03217901 0.2162559 0.02463380 0.0009963381 0.02805329
## 126   126 0.03217317 0.2165350 0.02463161 0.0009898299 0.02798105
## 127   127 0.03216966 0.2166961 0.02462959 0.0009920346 0.02769300
## 128   128 0.03217511 0.2164790 0.02463733 0.0009920044 0.02767984
## 129   129 0.03217196 0.2165960 0.02463630 0.0009978645 0.02759374
## 130   130 0.03217441 0.2165134 0.02463715 0.0010006729 0.02762871
## 131   131 0.03218228 0.2161996 0.02464428 0.0009990523 0.02764652
## 132   132 0.03218117 0.2162294 0.02464626 0.0010039255 0.02758428
## 133   133 0.03218240 0.2161854 0.02464686 0.0010014319 0.02754837
## 134   134 0.03218657 0.2160732 0.02464735 0.0010026138 0.02765308
## 135   135 0.03218363 0.2162047 0.02464800 0.0010007020 0.02763048
## 136   136 0.03218603 0.2161304 0.02464921 0.0010081763 0.02742975
## 137   137 0.03217995 0.2163820 0.02464368 0.0010044814 0.02735719
## 138   138 0.03217741 0.2165008 0.02463995 0.0010039257 0.02717285
## 139   139 0.03217691 0.2165094 0.02463613 0.0010079211 0.02701616
## 140   140 0.03218028 0.2163764 0.02463569 0.0010124188 0.02692202
## 141   141 0.03218624 0.2161521 0.02464017 0.0010125825 0.02688385
## 142   142 0.03219537 0.2157547 0.02465103 0.0010143060 0.02682086
## 143   143 0.03220021 0.2155697 0.02465256 0.0010121680 0.02685285
## 144   144 0.03220340 0.2154408 0.02465567 0.0010086419 0.02696587
## 145   145 0.03220323 0.2154612 0.02465466 0.0010072197 0.02712649
## 146   146 0.03220615 0.2153383 0.02465568 0.0010111334 0.02726330
## 147   147 0.03220933 0.2152114 0.02465549 0.0010130228 0.02711207
## 148   148 0.03221524 0.2149885 0.02465565 0.0010161915 0.02738923
## 149   149 0.03221353 0.2150468 0.02465379 0.0010243199 0.02723500
## 150   150 0.03221589 0.2149345 0.02465345 0.0010275719 0.02697666
## 151   151 0.03221819 0.2148270 0.02465558 0.0010292177 0.02714007
## 152   152 0.03221929 0.2147485 0.02465630 0.0010293177 0.02696511
## 153   153 0.03221991 0.2147198 0.02465699 0.0010335789 0.02687222
## 154   154 0.03222420 0.2145461 0.02465916 0.0010310222 0.02691348
## 155   155 0.03222711 0.2144446 0.02466218 0.0010266696 0.02712656
## 156   156 0.03223619 0.2140530 0.02466660 0.0010286898 0.02717952
## 157   157 0.03224181 0.2137987 0.02466921 0.0010306819 0.02700361
## 158   158 0.03223705 0.2140214 0.02466559 0.0010289106 0.02696976
## 159   159 0.03223360 0.2141595 0.02466473 0.0010339439 0.02695353
## 160   160 0.03223173 0.2142344 0.02466349 0.0010352421 0.02686554
## 161   161 0.03223019 0.2143146 0.02466177 0.0010354535 0.02700547
## 162   162 0.03222553 0.2145163 0.02465750 0.0010336107 0.02704354
## 163   163 0.03222476 0.2145464 0.02465840 0.0010363876 0.02698129
## 164   164 0.03222191 0.2146655 0.02465443 0.0010310510 0.02702169
## 165   165 0.03221676 0.2148991 0.02465257 0.0010286564 0.02696774
## 166   166 0.03221585 0.2149465 0.02464896 0.0010229804 0.02697569
## 167   167 0.03221910 0.2148206 0.02465297 0.0010262417 0.02689212
## 168   168 0.03221552 0.2149850 0.02465197 0.0010291154 0.02706303
## 169   169 0.03222021 0.2147986 0.02465958 0.0010273574 0.02710324
## 170   170 0.03222106 0.2147695 0.02466060 0.0010273249 0.02718464
## 171   171 0.03222330 0.2146761 0.02466399 0.0010277685 0.02701815
## 172   172 0.03222364 0.2146640 0.02466646 0.0010256659 0.02700205
## 173   173 0.03222302 0.2147088 0.02466638 0.0010265446 0.02689653
## 174   174 0.03222351 0.2146887 0.02466643 0.0010286469 0.02676358
## 175   175 0.03222466 0.2146571 0.02466432 0.0010316099 0.02681563
## 176   176 0.03222912 0.2144594 0.02466978 0.0010341659 0.02671964
## 177   177 0.03223220 0.2143265 0.02467385 0.0010340666 0.02670874
## 178   178 0.03223165 0.2143418 0.02467448 0.0010318501 0.02664083
## 179   179 0.03223126 0.2143676 0.02467398 0.0010312085 0.02673380
## 180   180 0.03223281 0.2143009 0.02467374 0.0010314201 0.02686293
## 181   181 0.03223690 0.2141224 0.02467733 0.0010299603 0.02691721
## 182   182 0.03223190 0.2143298 0.02467426 0.0010312906 0.02701837
## 183   183 0.03222869 0.2144640 0.02467309 0.0010336079 0.02705171
## 184   184 0.03223128 0.2143541 0.02467655 0.0010369733 0.02715954
## 185   185 0.03222449 0.2146414 0.02467279 0.0010377651 0.02720021
## 186   186 0.03222232 0.2147351 0.02467095 0.0010366830 0.02717686
## 187   187 0.03222255 0.2147256 0.02467203 0.0010366656 0.02726421
## 188   188 0.03222582 0.2145900 0.02467648 0.0010381219 0.02727544
## 189   189 0.03222725 0.2145114 0.02467832 0.0010422847 0.02720718
## 190   190 0.03222934 0.2144297 0.02468033 0.0010440887 0.02729145
## 191   191 0.03222926 0.2144298 0.02467897 0.0010433988 0.02734322
## 192   192 0.03222938 0.2144212 0.02467963 0.0010441770 0.02744350
## 193   193 0.03223054 0.2143701 0.02468088 0.0010438126 0.02745762
## 194   194 0.03223178 0.2143165 0.02468278 0.0010406123 0.02738261
## 195   195 0.03223065 0.2143718 0.02468234 0.0010400593 0.02741552
## 196   196 0.03222926 0.2144278 0.02468114 0.0010404049 0.02750016
## 197   197 0.03222897 0.2144278 0.02468163 0.0010440271 0.02745283
## 198   198 0.03223127 0.2143341 0.02468367 0.0010440249 0.02753839
## 199   199 0.03223049 0.2143638 0.02468279 0.0010439267 0.02750348
## 200   200 0.03223011 0.2143799 0.02468214 0.0010477634 0.02755269
## 201   201 0.03222937 0.2144093 0.02468108 0.0010476185 0.02764257
## 202   202 0.03223170 0.2143143 0.02468308 0.0010448119 0.02759767
## 203   203 0.03222986 0.2143986 0.02468219 0.0010429669 0.02758904
## 204   204 0.03223128 0.2143422 0.02468494 0.0010437813 0.02748991
## 205   205 0.03223160 0.2143291 0.02468513 0.0010444590 0.02757676
## 206   206 0.03223155 0.2143227 0.02468628 0.0010464766 0.02756180
## 207   207 0.03223262 0.2142770 0.02468709 0.0010461715 0.02757732
## 208   208 0.03223266 0.2142798 0.02468622 0.0010455724 0.02765699
## 209   209 0.03223301 0.2142686 0.02468618 0.0010443455 0.02762081
## 210   210 0.03223320 0.2142497 0.02468694 0.0010465787 0.02753182
## 211   211 0.03223318 0.2142541 0.02468825 0.0010473600 0.02750010
## 212   212 0.03223250 0.2142915 0.02468787 0.0010461387 0.02754856
## 213   213 0.03223343 0.2142513 0.02468865 0.0010461559 0.02751035
## 214   214 0.03223389 0.2142325 0.02468867 0.0010437209 0.02746146
## 215   215 0.03223403 0.2142238 0.02468962 0.0010456569 0.02733998
## 216   216 0.03223479 0.2141985 0.02469043 0.0010441398 0.02729859
## 217   217 0.03223538 0.2141725 0.02469075 0.0010437772 0.02732615
## 218   218 0.03223540 0.2141690 0.02469092 0.0010442496 0.02728024
## 219   219 0.03223475 0.2142005 0.02469020 0.0010446956 0.02728845
## 220   220 0.03223428 0.2142236 0.02468977 0.0010444562 0.02725898
## 221   221 0.03223504 0.2141853 0.02469117 0.0010445416 0.02720648
## 222   222 0.03223526 0.2141694 0.02469290 0.0010456283 0.02720763
## 223   223 0.03223507 0.2141796 0.02469298 0.0010452188 0.02726817
## 224   224 0.03223452 0.2142026 0.02469286 0.0010448990 0.02725018
## 225   225 0.03223502 0.2141819 0.02469298 0.0010445208 0.02725529
## 226   226 0.03223463 0.2141981 0.02469295 0.0010447328 0.02723686
## 227   227 0.03223413 0.2142199 0.02469274 0.0010449003 0.02723661
## 228   228 0.03223320 0.2142582 0.02469156 0.0010457992 0.02721714
## 229   229 0.03223376 0.2142368 0.02469243 0.0010446661 0.02720360
## 230   230 0.03223402 0.2142238 0.02469267 0.0010448681 0.02719640
## 231   231 0.03223373 0.2142371 0.02469240 0.0010450460 0.02721503
## 232   232 0.03223404 0.2142241 0.02469243 0.0010453876 0.02723733
## 233   233 0.03223429 0.2142132 0.02469274 0.0010456495 0.02726316
## 234   234 0.03223392 0.2142269 0.02469274 0.0010457886 0.02725864
## 235   235 0.03223414 0.2142182 0.02469299 0.0010459490 0.02725502
## 236   236 0.03223406 0.2142203 0.02469294 0.0010461860 0.02728085
## 237   237 0.03223403 0.2142210 0.02469284 0.0010459806 0.02725711
## 238   238 0.03223436 0.2142079 0.02469323 0.0010461352 0.02727336
## 239   239 0.03223419 0.2142160 0.02469319 0.0010460914 0.02727348
## 240   240 0.03223413 0.2142186 0.02469321 0.0010461107 0.02727501
##            MAESD
## 1   0.0006039987
## 2   0.0005560515
## 3   0.0005161705
## 4   0.0005030416
## 5   0.0004516171
## 6   0.0004274403
## 7   0.0004386333
## 8   0.0004116784
## 9   0.0004027648
## 10  0.0003804190
## 11  0.0003827175
## 12  0.0003714371
## 13  0.0003683846
## 14  0.0003455862
## 15  0.0003658994
## 16  0.0003688301
## 17  0.0003957368
## 18  0.0004141335
## 19  0.0003935137
## 20  0.0004296221
## 21  0.0004303736
## 22  0.0004185360
## 23  0.0004078318
## 24  0.0004093406
## 25  0.0004080051
## 26  0.0004010876
## 27  0.0003913574
## 28  0.0003946936
## 29  0.0004037086
## 30  0.0004107282
## 31  0.0004226944
## 32  0.0004225034
## 33  0.0004448925
## 34  0.0004395554
## 35  0.0004402790
## 36  0.0004361420
## 37  0.0004482860
## 38  0.0004605232
## 39  0.0004645625
## 40  0.0004664103
## 41  0.0004825004
## 42  0.0004953591
## 43  0.0004961167
## 44  0.0004970095
## 45  0.0004823461
## 46  0.0004800355
## 47  0.0004673817
## 48  0.0004671513
## 49  0.0004575740
## 50  0.0004538221
## 51  0.0004545868
## 52  0.0004697742
## 53  0.0004648600
## 54  0.0004673599
## 55  0.0004620623
## 56  0.0004669444
## 57  0.0004512827
## 58  0.0004522348
## 59  0.0004465430
## 60  0.0004446658
## 61  0.0004252333
## 62  0.0004267543
## 63  0.0004375628
## 64  0.0004362941
## 65  0.0004314322
## 66  0.0004333280
## 67  0.0004301794
## 68  0.0004320149
## 69  0.0004320135
## 70  0.0004455633
## 71  0.0004521234
## 72  0.0004604000
## 73  0.0004654286
## 74  0.0004539451
## 75  0.0004571830
## 76  0.0004519315
## 77  0.0004484431
## 78  0.0004450123
## 79  0.0004611947
## 80  0.0004803766
## 81  0.0004789181
## 82  0.0004805474
## 83  0.0004796394
## 84  0.0004781293
## 85  0.0004735814
## 86  0.0004631454
## 87  0.0004591645
## 88  0.0004536634
## 89  0.0004457315
## 90  0.0004525686
## 91  0.0004540103
## 92  0.0004584299
## 93  0.0004567965
## 94  0.0004435839
## 95  0.0004467587
## 96  0.0004418337
## 97  0.0004563701
## 98  0.0004690919
## 99  0.0004598628
## 100 0.0004640794
## 101 0.0004640015
## 102 0.0004560496
## 103 0.0004648776
## 104 0.0004599889
## 105 0.0004620722
## 106 0.0004633503
## 107 0.0004554726
## 108 0.0004541538
## 109 0.0004492258
## 110 0.0004458360
## 111 0.0004461623
## 112 0.0004503142
## 113 0.0004449964
## 114 0.0004438039
## 115 0.0004373721
## 116 0.0004417682
## 117 0.0004388079
## 118 0.0004509588
## 119 0.0004527550
## 120 0.0004564964
## 121 0.0004607525
## 122 0.0004530242
## 123 0.0004445684
## 124 0.0004437379
## 125 0.0004428616
## 126 0.0004383164
## 127 0.0004439654
## 128 0.0004425114
## 129 0.0004475278
## 130 0.0004539906
## 131 0.0004555961
## 132 0.0004548395
## 133 0.0004500750
## 134 0.0004548076
## 135 0.0004505811
## 136 0.0004532243
## 137 0.0004523636
## 138 0.0004523215
## 139 0.0004536599
## 140 0.0004508349
## 141 0.0004520956
## 142 0.0004529830
## 143 0.0004506338
## 144 0.0004482863
## 145 0.0004452007
## 146 0.0004465150
## 147 0.0004506502
## 148 0.0004551106
## 149 0.0004572718
## 150 0.0004541435
## 151 0.0004544823
## 152 0.0004551410
## 153 0.0004611971
## 154 0.0004588877
## 155 0.0004558186
## 156 0.0004526859
## 157 0.0004483682
## 158 0.0004470474
## 159 0.0004521961
## 160 0.0004551061
## 161 0.0004572308
## 162 0.0004519747
## 163 0.0004539580
## 164 0.0004492109
## 165 0.0004472587
## 166 0.0004416734
## 167 0.0004462159
## 168 0.0004485969
## 169 0.0004460821
## 170 0.0004482488
## 171 0.0004496480
## 172 0.0004542897
## 173 0.0004531522
## 174 0.0004527769
## 175 0.0004524037
## 176 0.0004526027
## 177 0.0004543979
## 178 0.0004540366
## 179 0.0004554637
## 180 0.0004566642
## 181 0.0004545913
## 182 0.0004598192
## 183 0.0004621286
## 184 0.0004650437
## 185 0.0004664027
## 186 0.0004664385
## 187 0.0004679595
## 188 0.0004653387
## 189 0.0004674643
## 190 0.0004709530
## 191 0.0004717812
## 192 0.0004715297
## 193 0.0004726529
## 194 0.0004715795
## 195 0.0004698748
## 196 0.0004716496
## 197 0.0004727940
## 198 0.0004731631
## 199 0.0004723402
## 200 0.0004763508
## 201 0.0004749317
## 202 0.0004731103
## 203 0.0004717088
## 204 0.0004715263
## 205 0.0004730446
## 206 0.0004730972
## 207 0.0004722764
## 208 0.0004738190
## 209 0.0004720531
## 210 0.0004721833
## 211 0.0004724902
## 212 0.0004712165
## 213 0.0004703265
## 214 0.0004683633
## 215 0.0004682378
## 216 0.0004670981
## 217 0.0004664416
## 218 0.0004662273
## 219 0.0004660352
## 220 0.0004662158
## 221 0.0004657934
## 222 0.0004665774
## 223 0.0004669865
## 224 0.0004666995
## 225 0.0004664618
## 226 0.0004665780
## 227 0.0004670497
## 228 0.0004673052
## 229 0.0004660760
## 230 0.0004663063
## 231 0.0004664695
## 232 0.0004670043
## 233 0.0004673417
## 234 0.0004674003
## 235 0.0004671664
## 236 0.0004674086
## 237 0.0004670385
## 238 0.0004674181
## 239 0.0004674932
## 240 0.0004675061
##    nvmax
## 21    21

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  1.968225e+00 -4.615460e-05  1.126211e-02  5.378319e-04  3.521503e-03 
##           x10           x11           x16           x17           x21 
##  1.089648e-03  1.907896e+05  8.000674e-04  1.492599e-03  1.314116e-04 
##        stat14        stat23        stat60        stat98       stat104 
## -7.007932e-04  7.363328e-04  6.162420e-04  3.486509e-03 -5.752055e-04 
##       stat110       stat144       stat149       stat187       stat198 
## -3.348495e-03  6.801916e-04 -6.903981e-04 -6.306481e-04 -5.287456e-04 
##       stat204      sqrt.x18 
## -5.279347e-04  2.669560e-02

Test

# Evaluate the CV-selected backward-elimination model on the hold-out test set.
# isTRUE() is safer than `== TRUE`: it returns FALSE (skip) rather than
# erroring if the flag is ever NA or non-logical.
if (isTRUE(algo.backward.caret)) {
  test.model(model.backward, data.test,
             method = "leapBackward", subopt = NULL,
             formula = formula, feature.names = feature.names,
             label.names = label.names,
             id = id,
             draw.limits = TRUE,
             # NOTE(review): `t` resolves to base R's transpose function unless
             # a transformation flag named `t` is defined upstream — confirm.
             transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.034   2.084   2.097   2.096   2.109   2.147 
## [1] "leapBackward  Test MSE: 0.00103959607798376"

Backward Elimination with CV (w/ filtered train)

Train

# Backward elimination with cross-validation (caret, method "leapBackward")
# on the FILTERED training set; keeps the fitted model and the selected
# feature ids for the subsequent test step.
if (isTRUE(algo.backward.caret)) {
  set.seed(1)  # reproducible CV folds
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train2,
    method = "leapBackward",
    feature.names = feature.names
  )
  model.backward <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 17 on full training set
##     nvmax       RMSE  Rsquared        MAE       RMSESD RsquaredSD
## 1       1 0.02888038 0.1564097 0.02335547 0.0006267618 0.02633141
## 2       2 0.02766787 0.2253658 0.02249776 0.0006867294 0.02103652
## 3       3 0.02710230 0.2568330 0.02192148 0.0008105304 0.02659149
## 4       4 0.02638810 0.2952697 0.02113655 0.0007311698 0.02534274
## 5       5 0.02604900 0.3130434 0.02090228 0.0006875295 0.02296316
## 6       6 0.02593917 0.3187854 0.02084831 0.0006494856 0.02073056
## 7       7 0.02589964 0.3209157 0.02084970 0.0006015737 0.02042737
## 8       8 0.02582820 0.3246426 0.02082087 0.0005550767 0.02247168
## 9       9 0.02579308 0.3265747 0.02082103 0.0005507879 0.02199516
## 10     10 0.02571588 0.3305729 0.02078122 0.0005582145 0.02228794
## 11     11 0.02568025 0.3325064 0.02074574 0.0005288941 0.02225932
## 12     12 0.02567812 0.3326191 0.02075803 0.0005158039 0.02239029
## 13     13 0.02564510 0.3343325 0.02073002 0.0005334347 0.02150174
## 14     14 0.02565553 0.3337725 0.02074496 0.0005376892 0.02133207
## 15     15 0.02565558 0.3337869 0.02074086 0.0005389872 0.02180600
## 16     16 0.02565815 0.3336140 0.02074875 0.0005288357 0.02226318
## 17     17 0.02563966 0.3346075 0.02075470 0.0005273379 0.02198116
## 18     18 0.02567450 0.3328184 0.02077682 0.0005577289 0.02193368
## 19     19 0.02567251 0.3329551 0.02077497 0.0005421905 0.02160312
## 20     20 0.02570569 0.3313067 0.02079962 0.0005573473 0.02263287
## 21     21 0.02572145 0.3305660 0.02081432 0.0005350083 0.02107574
## 22     22 0.02571745 0.3308431 0.02080336 0.0005253332 0.02118837
## 23     23 0.02572432 0.3304617 0.02080958 0.0005122830 0.02142865
## 24     24 0.02572928 0.3302298 0.02080454 0.0005156292 0.02249707
## 25     25 0.02573962 0.3297634 0.02080145 0.0005076975 0.02220226
## 26     26 0.02571756 0.3308208 0.02078905 0.0005283183 0.02295738
## 27     27 0.02571703 0.3308756 0.02077874 0.0005229586 0.02378530
## 28     28 0.02571244 0.3311394 0.02076828 0.0005201583 0.02332965
## 29     29 0.02571695 0.3309301 0.02077867 0.0005233092 0.02305921
## 30     30 0.02569731 0.3319392 0.02076254 0.0005228247 0.02225371
## 31     31 0.02569099 0.3322713 0.02076104 0.0005335996 0.02320756
## 32     32 0.02568091 0.3327698 0.02073825 0.0005178291 0.02276693
## 33     33 0.02569096 0.3322634 0.02074979 0.0005145192 0.02236245
## 34     34 0.02567636 0.3330014 0.02074480 0.0005144960 0.02229300
## 35     35 0.02567983 0.3328387 0.02074383 0.0005094589 0.02335017
## 36     36 0.02568686 0.3325299 0.02073976 0.0005056167 0.02344333
## 37     37 0.02568570 0.3326016 0.02074075 0.0005286131 0.02277088
## 38     38 0.02568638 0.3326004 0.02074278 0.0005350692 0.02260502
## 39     39 0.02568286 0.3327595 0.02073485 0.0005287754 0.02230007
## 40     40 0.02568454 0.3327109 0.02073789 0.0005365565 0.02306433
## 41     41 0.02567329 0.3333159 0.02073435 0.0005397255 0.02322542
## 42     42 0.02567772 0.3330906 0.02074146 0.0005350801 0.02269924
## 43     43 0.02566275 0.3338622 0.02073982 0.0005462443 0.02243724
## 44     44 0.02567082 0.3335029 0.02074629 0.0005575857 0.02289903
## 45     45 0.02568473 0.3328624 0.02075953 0.0005595466 0.02341015
## 46     46 0.02569083 0.3326059 0.02076323 0.0005617568 0.02375068
## 47     47 0.02569946 0.3321807 0.02077729 0.0005461298 0.02312327
## 48     48 0.02570694 0.3318366 0.02078093 0.0005608808 0.02398227
## 49     49 0.02570771 0.3317834 0.02079036 0.0005708561 0.02463267
## 50     50 0.02571639 0.3313935 0.02080577 0.0005848652 0.02485342
## 51     51 0.02571066 0.3316740 0.02080134 0.0005812832 0.02520493
## 52     52 0.02571331 0.3315796 0.02079779 0.0005551366 0.02451314
## 53     53 0.02570849 0.3318281 0.02080063 0.0005558138 0.02491113
## 54     54 0.02570599 0.3319673 0.02079482 0.0005546238 0.02485081
## 55     55 0.02570246 0.3321582 0.02078976 0.0005514278 0.02461288
## 56     56 0.02570325 0.3321666 0.02078350 0.0005498309 0.02476050
## 57     57 0.02572034 0.3313728 0.02079365 0.0005534066 0.02533999
## 58     58 0.02572871 0.3309725 0.02080136 0.0005604899 0.02561695
## 59     59 0.02572404 0.3312330 0.02079534 0.0005592884 0.02547807
## 60     60 0.02573404 0.3307269 0.02081168 0.0005618054 0.02569169
## 61     61 0.02573608 0.3306454 0.02080592 0.0005563158 0.02538104
## 62     62 0.02575237 0.3298857 0.02081633 0.0005819762 0.02569464
## 63     63 0.02575054 0.3300064 0.02081864 0.0005803455 0.02543575
## 64     64 0.02575167 0.3299783 0.02082416 0.0005815590 0.02518551
## 65     65 0.02574712 0.3302708 0.02082071 0.0005866952 0.02541418
## 66     66 0.02574612 0.3303458 0.02081968 0.0005979830 0.02526880
## 67     67 0.02575563 0.3298858 0.02082248 0.0006002045 0.02579809
## 68     68 0.02574719 0.3303432 0.02081677 0.0005964441 0.02558502
## 69     69 0.02575625 0.3299513 0.02082526 0.0005982113 0.02548721
## 70     70 0.02576944 0.3293312 0.02083566 0.0006005036 0.02534062
## 71     71 0.02576537 0.3295151 0.02083234 0.0005968164 0.02492351
## 72     72 0.02577316 0.3291135 0.02083680 0.0005939839 0.02487979
## 73     73 0.02578598 0.3285133 0.02084649 0.0005947746 0.02473150
## 74     74 0.02579079 0.3282821 0.02085145 0.0005992973 0.02476445
## 75     75 0.02580140 0.3277739 0.02085912 0.0006020817 0.02458598
## 76     76 0.02580887 0.3274361 0.02086250 0.0006061824 0.02478010
## 77     77 0.02582297 0.3267324 0.02087888 0.0006042877 0.02442560
## 78     78 0.02582579 0.3266021 0.02088487 0.0005979411 0.02389879
## 79     79 0.02582967 0.3264132 0.02088885 0.0005902857 0.02340927
## 80     80 0.02584264 0.3257784 0.02090073 0.0005811177 0.02285018
## 81     81 0.02584181 0.3258403 0.02089819 0.0005832150 0.02282523
## 82     82 0.02585085 0.3254429 0.02090735 0.0005833348 0.02308655
## 83     83 0.02584393 0.3257886 0.02090389 0.0005883268 0.02336236
## 84     84 0.02584308 0.3258459 0.02090300 0.0005910379 0.02309422
## 85     85 0.02583660 0.3261900 0.02089235 0.0005867458 0.02304421
## 86     86 0.02582759 0.3266669 0.02088669 0.0005844546 0.02324747
## 87     87 0.02583165 0.3265219 0.02088934 0.0005748188 0.02283684
## 88     88 0.02582447 0.3269211 0.02088201 0.0005825564 0.02323083
## 89     89 0.02582379 0.3269877 0.02088347 0.0005791658 0.02326645
## 90     90 0.02581768 0.3272874 0.02087990 0.0005795068 0.02311229
## 91     91 0.02581858 0.3272141 0.02087979 0.0005714053 0.02275776
## 92     92 0.02582065 0.3271103 0.02088160 0.0005699833 0.02242137
## 93     93 0.02582024 0.3271564 0.02088697 0.0005674436 0.02241051
## 94     94 0.02582425 0.3269777 0.02089474 0.0005608798 0.02218480
## 95     95 0.02582704 0.3268653 0.02090146 0.0005691854 0.02229040
## 96     96 0.02583780 0.3263118 0.02091120 0.0005709819 0.02275849
## 97     97 0.02583194 0.3265821 0.02091205 0.0005627072 0.02245509
## 98     98 0.02583563 0.3264118 0.02091055 0.0005684523 0.02285248
## 99     99 0.02583277 0.3265603 0.02091241 0.0005807993 0.02295389
## 100   100 0.02583383 0.3265458 0.02091071 0.0005793584 0.02313779
## 101   101 0.02583152 0.3266299 0.02090691 0.0005752158 0.02306261
## 102   102 0.02583074 0.3266810 0.02090676 0.0005730749 0.02271006
## 103   103 0.02583064 0.3266946 0.02090250 0.0005764494 0.02302662
## 104   104 0.02582792 0.3268429 0.02089906 0.0005712405 0.02271117
## 105   105 0.02582193 0.3271517 0.02089296 0.0005760601 0.02286384
## 106   106 0.02582338 0.3270559 0.02089953 0.0005810822 0.02317164
## 107   107 0.02582498 0.3269811 0.02090311 0.0005912515 0.02360844
## 108   108 0.02582454 0.3270319 0.02090503 0.0005928183 0.02388058
## 109   109 0.02582773 0.3268964 0.02091437 0.0005952162 0.02391593
## 110   110 0.02581830 0.3273394 0.02091078 0.0005982447 0.02367525
## 111   111 0.02581400 0.3275396 0.02090208 0.0005971839 0.02355243
## 112   112 0.02580871 0.3277836 0.02089724 0.0005846499 0.02305313
## 113   113 0.02580692 0.3278690 0.02089457 0.0005874713 0.02305678
## 114   114 0.02579791 0.3283203 0.02089060 0.0005846388 0.02306257
## 115   115 0.02579864 0.3282889 0.02089189 0.0005832197 0.02332018
## 116   116 0.02579828 0.3283214 0.02089638 0.0005832395 0.02302931
## 117   117 0.02580671 0.3279436 0.02089777 0.0005921621 0.02303747
## 118   118 0.02580713 0.3279377 0.02089839 0.0005936209 0.02324419
## 119   119 0.02580935 0.3278339 0.02089990 0.0005937796 0.02316200
## 120   120 0.02580867 0.3278473 0.02090396 0.0005962684 0.02299158
## 121   121 0.02580583 0.3280010 0.02090000 0.0005969021 0.02289421
## 122   122 0.02581565 0.3275327 0.02090558 0.0006021020 0.02313400
## 123   123 0.02581667 0.3274748 0.02090347 0.0005963106 0.02304517
## 124   124 0.02581341 0.3276391 0.02089850 0.0005962448 0.02297065
## 125   125 0.02581641 0.3275032 0.02090312 0.0005934410 0.02299640
## 126   126 0.02581309 0.3276502 0.02090341 0.0005937065 0.02276878
## 127   127 0.02581618 0.3274976 0.02090291 0.0005903338 0.02286409
## 128   128 0.02582193 0.3272085 0.02090656 0.0005954018 0.02290651
## 129   129 0.02582484 0.3270686 0.02090478 0.0005912943 0.02286998
## 130   130 0.02582403 0.3271122 0.02090698 0.0005958074 0.02300095
## 131   131 0.02582145 0.3272323 0.02090141 0.0006002880 0.02360411
## 132   132 0.02581396 0.3275983 0.02089774 0.0005963332 0.02356754
## 133   133 0.02581792 0.3274085 0.02089967 0.0005991808 0.02356760
## 134   134 0.02581567 0.3275245 0.02089735 0.0005962420 0.02348222
## 135   135 0.02580863 0.3278732 0.02089413 0.0006065294 0.02400052
## 136   136 0.02580276 0.3281427 0.02088961 0.0006040719 0.02372495
## 137   137 0.02579986 0.3282852 0.02088501 0.0006058636 0.02367396
## 138   138 0.02579827 0.3283987 0.02088374 0.0006063617 0.02353857
## 139   139 0.02579853 0.3283549 0.02088201 0.0006119529 0.02391112
## 140   140 0.02580245 0.3281660 0.02088657 0.0006140684 0.02408106
## 141   141 0.02580331 0.3281539 0.02088633 0.0006152898 0.02405010
## 142   142 0.02580859 0.3279062 0.02088840 0.0006114647 0.02382492
## 143   143 0.02581420 0.3276530 0.02089250 0.0006121345 0.02402210
## 144   144 0.02581371 0.3276844 0.02089098 0.0006126948 0.02391850
## 145   145 0.02581099 0.3278022 0.02088740 0.0006147775 0.02392746
## 146   146 0.02580886 0.3279055 0.02088500 0.0006091901 0.02385621
## 147   147 0.02580716 0.3279902 0.02088560 0.0006085836 0.02382285
## 148   148 0.02580070 0.3283251 0.02088325 0.0006057108 0.02387619
## 149   149 0.02580521 0.3281216 0.02088507 0.0006080115 0.02387607
## 150   150 0.02580528 0.3281351 0.02088574 0.0006084479 0.02386306
## 151   151 0.02580714 0.3280483 0.02088838 0.0006055670 0.02365136
## 152   152 0.02580870 0.3279982 0.02089148 0.0006037103 0.02337011
## 153   153 0.02581028 0.3279218 0.02089359 0.0006028604 0.02341335
## 154   154 0.02580537 0.3281610 0.02089001 0.0006047494 0.02340353
## 155   155 0.02580457 0.3281934 0.02088879 0.0006072620 0.02326140
## 156   156 0.02580915 0.3279849 0.02088981 0.0006000412 0.02305671
## 157   157 0.02580601 0.3281527 0.02088919 0.0005996353 0.02294202
## 158   158 0.02579702 0.3285738 0.02087969 0.0006018117 0.02300721
## 159   159 0.02579981 0.3284511 0.02088321 0.0006071578 0.02320003
## 160   160 0.02579370 0.3287494 0.02087794 0.0006074290 0.02320654
## 161   161 0.02579114 0.3288656 0.02087361 0.0006052544 0.02329639
## 162   162 0.02579552 0.3286747 0.02087494 0.0005993793 0.02312993
## 163   163 0.02579294 0.3287792 0.02087085 0.0006010475 0.02329765
## 164   164 0.02578822 0.3290137 0.02086440 0.0006019867 0.02347243
## 165   165 0.02578609 0.3291135 0.02086158 0.0005981495 0.02350725
## 166   166 0.02578558 0.3291366 0.02086110 0.0005994136 0.02370887
## 167   167 0.02578568 0.3291302 0.02086177 0.0005979542 0.02379997
## 168   168 0.02578841 0.3289942 0.02086653 0.0005970647 0.02375210
## 169   169 0.02578877 0.3289777 0.02087010 0.0005957025 0.02351222
## 170   170 0.02578689 0.3290633 0.02086977 0.0005961564 0.02342855
## 171   171 0.02578683 0.3290835 0.02086831 0.0005931100 0.02289322
## 172   172 0.02578665 0.3290948 0.02086843 0.0005953732 0.02284599
## 173   173 0.02578405 0.3292163 0.02086701 0.0005959394 0.02301613
## 174   174 0.02578408 0.3292187 0.02086561 0.0005949276 0.02298360
## 175   175 0.02578261 0.3292913 0.02086404 0.0005959604 0.02298112
## 176   176 0.02578165 0.3293326 0.02086552 0.0005917618 0.02286919
## 177   177 0.02578270 0.3292775 0.02086708 0.0005872513 0.02270647
## 178   178 0.02578725 0.3290585 0.02087211 0.0005873113 0.02276321
## 179   179 0.02578871 0.3289975 0.02087530 0.0005876232 0.02281102
## 180   180 0.02579264 0.3288038 0.02087779 0.0005823554 0.02271104
## 181   181 0.02579328 0.3287794 0.02087727 0.0005802602 0.02264810
## 182   182 0.02579484 0.3287236 0.02088082 0.0005836557 0.02264377
## 183   183 0.02579159 0.3288893 0.02087840 0.0005821803 0.02254190
## 184   184 0.02579271 0.3288355 0.02088072 0.0005819130 0.02260647
## 185   185 0.02579104 0.3289156 0.02087829 0.0005854382 0.02260853
## 186   186 0.02578973 0.3289715 0.02087898 0.0005867552 0.02255972
## 187   187 0.02579097 0.3289162 0.02087888 0.0005896168 0.02260286
## 188   188 0.02579226 0.3288658 0.02087838 0.0005863259 0.02245571
## 189   189 0.02579283 0.3288465 0.02087870 0.0005846947 0.02239217
## 190   190 0.02578956 0.3289941 0.02087719 0.0005823063 0.02226718
## 191   191 0.02578944 0.3290054 0.02087674 0.0005827214 0.02225446
## 192   192 0.02578890 0.3290219 0.02087652 0.0005838578 0.02238692
## 193   193 0.02578778 0.3290787 0.02087435 0.0005840737 0.02236546
## 194   194 0.02578982 0.3289739 0.02087471 0.0005824844 0.02239950
## 195   195 0.02578970 0.3289887 0.02087341 0.0005815862 0.02238593
## 196   196 0.02579180 0.3288837 0.02087483 0.0005846724 0.02249194
## 197   197 0.02579142 0.3289126 0.02087533 0.0005851812 0.02252236
## 198   198 0.02579054 0.3289585 0.02087409 0.0005854075 0.02249347
## 199   199 0.02578955 0.3290079 0.02087240 0.0005845142 0.02254083
## 200   200 0.02579066 0.3289553 0.02087484 0.0005845773 0.02253186
## 201   201 0.02579044 0.3289670 0.02087368 0.0005826670 0.02247715
## 202   202 0.02579222 0.3288810 0.02087633 0.0005820911 0.02232023
## 203   203 0.02579385 0.3288049 0.02087871 0.0005807395 0.02218610
## 204   204 0.02579630 0.3286833 0.02087973 0.0005824461 0.02222385
## 205   205 0.02579818 0.3285946 0.02088062 0.0005816933 0.02215222
## 206   206 0.02579794 0.3286098 0.02088030 0.0005827950 0.02226411
## 207   207 0.02579800 0.3286101 0.02088055 0.0005856948 0.02236005
## 208   208 0.02579890 0.3285743 0.02088098 0.0005839143 0.02231316
## 209   209 0.02579878 0.3285812 0.02088050 0.0005844861 0.02235314
## 210   210 0.02579904 0.3285756 0.02088019 0.0005868091 0.02241808
## 211   211 0.02579819 0.3286180 0.02087892 0.0005868832 0.02241739
## 212   212 0.02580008 0.3285284 0.02088064 0.0005852382 0.02238595
## 213   213 0.02580063 0.3285014 0.02088142 0.0005857491 0.02238421
## 214   214 0.02580032 0.3285186 0.02088129 0.0005856118 0.02240728
## 215   215 0.02579945 0.3285596 0.02087994 0.0005856497 0.02243807
## 216   216 0.02579970 0.3285507 0.02088041 0.0005865858 0.02248964
## 217   217 0.02580044 0.3285194 0.02088090 0.0005873269 0.02251864
## 218   218 0.02580116 0.3284874 0.02088125 0.0005879340 0.02253196
## 219   219 0.02580166 0.3284622 0.02088203 0.0005889135 0.02251416
## 220   220 0.02580203 0.3284464 0.02088195 0.0005886014 0.02248331
## 221   221 0.02580261 0.3284173 0.02088168 0.0005880471 0.02244028
## 222   222 0.02580418 0.3283414 0.02088338 0.0005884707 0.02243783
## 223   223 0.02580430 0.3283402 0.02088385 0.0005894817 0.02246021
## 224   224 0.02580460 0.3283266 0.02088451 0.0005893404 0.02246173
## 225   225 0.02580541 0.3282875 0.02088516 0.0005894300 0.02242379
## 226   226 0.02580533 0.3282908 0.02088529 0.0005894448 0.02244340
## 227   227 0.02580573 0.3282716 0.02088536 0.0005888277 0.02240785
## 228   228 0.02580530 0.3282953 0.02088497 0.0005896620 0.02240748
## 229   229 0.02580519 0.3282996 0.02088502 0.0005895789 0.02242132
## 230   230 0.02580500 0.3283086 0.02088452 0.0005890612 0.02242948
## 231   231 0.02580437 0.3283363 0.02088425 0.0005887867 0.02240553
## 232   232 0.02580456 0.3283292 0.02088410 0.0005887680 0.02239511
## 233   233 0.02580418 0.3283463 0.02088389 0.0005892820 0.02241764
## 234   234 0.02580336 0.3283860 0.02088309 0.0005897805 0.02244912
## 235   235 0.02580288 0.3284094 0.02088269 0.0005892806 0.02243559
## 236   236 0.02580313 0.3283991 0.02088306 0.0005894619 0.02244298
## 237   237 0.02580313 0.3283989 0.02088291 0.0005895992 0.02245239
## 238   238 0.02580317 0.3283959 0.02088292 0.0005895545 0.02245788
## 239   239 0.02580340 0.3283851 0.02088304 0.0005896238 0.02245255
## 240   240 0.02580348 0.3283813 0.02088310 0.0005896638 0.02245074
##            MAESD
## 1   0.0004604171
## 2   0.0005295821
## 3   0.0005663676
## 4   0.0004831073
## 5   0.0004993662
## 6   0.0005070940
## 7   0.0004687843
## 8   0.0004502107
## 9   0.0004598547
## 10  0.0004621940
## 11  0.0004355906
## 12  0.0004338364
## 13  0.0004558980
## 14  0.0004441391
## 15  0.0004482284
## 16  0.0004294657
## 17  0.0004146737
## 18  0.0004557981
## 19  0.0004466196
## 20  0.0004453456
## 21  0.0004429048
## 22  0.0004518993
## 23  0.0004379285
## 24  0.0004465630
## 25  0.0004592782
## 26  0.0004735025
## 27  0.0004848565
## 28  0.0004793809
## 29  0.0004684547
## 30  0.0004624349
## 31  0.0004643623
## 32  0.0004527079
## 33  0.0004616697
## 34  0.0004666074
## 35  0.0004623012
## 36  0.0004550907
## 37  0.0004761892
## 38  0.0004813189
## 39  0.0004787012
## 40  0.0004864702
## 41  0.0004938228
## 42  0.0004864273
## 43  0.0005055377
## 44  0.0005171790
## 45  0.0005255145
## 46  0.0005269799
## 47  0.0005133467
## 48  0.0005206387
## 49  0.0005348896
## 50  0.0005518380
## 51  0.0005513937
## 52  0.0005319036
## 53  0.0005451481
## 54  0.0005399562
## 55  0.0005412661
## 56  0.0005417183
## 57  0.0005427385
## 58  0.0005482809
## 59  0.0005433028
## 60  0.0005533246
## 61  0.0005445195
## 62  0.0005663192
## 63  0.0005654507
## 64  0.0005721150
## 65  0.0005858309
## 66  0.0005973548
## 67  0.0005946034
## 68  0.0005975848
## 69  0.0006092531
## 70  0.0006100484
## 71  0.0006073626
## 72  0.0006003446
## 73  0.0005948896
## 74  0.0005956492
## 75  0.0005991201
## 76  0.0005982856
## 77  0.0005976154
## 78  0.0005907601
## 79  0.0005830541
## 80  0.0005777160
## 81  0.0005747003
## 82  0.0005809020
## 83  0.0005808642
## 84  0.0005815116
## 85  0.0005776121
## 86  0.0005666066
## 87  0.0005665141
## 88  0.0005726490
## 89  0.0005676991
## 90  0.0005676389
## 91  0.0005607866
## 92  0.0005606549
## 93  0.0005647758
## 94  0.0005588748
## 95  0.0005644994
## 96  0.0005638504
## 97  0.0005505650
## 98  0.0005497295
## 99  0.0005673417
## 100 0.0005657319
## 101 0.0005666750
## 102 0.0005716548
## 103 0.0005732765
## 104 0.0005667196
## 105 0.0005692025
## 106 0.0005764886
## 107 0.0005847642
## 108 0.0005865836
## 109 0.0005913864
## 110 0.0005963548
## 111 0.0005975086
## 112 0.0005876688
## 113 0.0005843044
## 114 0.0005842278
## 115 0.0005826612
## 116 0.0005854666
## 117 0.0005910795
## 118 0.0005902498
## 119 0.0005857763
## 120 0.0005844423
## 121 0.0005893846
## 122 0.0005913034
## 123 0.0005875533
## 124 0.0005896728
## 125 0.0005903994
## 126 0.0005879755
## 127 0.0005823860
## 128 0.0005829748
## 129 0.0005789386
## 130 0.0005813008
## 131 0.0005852766
## 132 0.0005799561
## 133 0.0005839250
## 134 0.0005860830
## 135 0.0005966914
## 136 0.0005931020
## 137 0.0005923046
## 138 0.0005923159
## 139 0.0005966040
## 140 0.0005974019
## 141 0.0005971932
## 142 0.0005907731
## 143 0.0005940754
## 144 0.0005958785
## 145 0.0005954800
## 146 0.0005924315
## 147 0.0005960955
## 148 0.0005934932
## 149 0.0005958900
## 150 0.0005932081
## 151 0.0005867798
## 152 0.0005877906
## 153 0.0005901562
## 154 0.0005924916
## 155 0.0005954319
## 156 0.0005881171
## 157 0.0005876005
## 158 0.0005890193
## 159 0.0005940532
## 160 0.0005912538
## 161 0.0005920677
## 162 0.0005853406
## 163 0.0005855101
## 164 0.0005871683
## 165 0.0005878264
## 166 0.0005876699
## 167 0.0005861051
## 168 0.0005854185
## 169 0.0005872321
## 170 0.0005846785
## 171 0.0005813525
## 172 0.0005836572
## 173 0.0005823769
## 174 0.0005827727
## 175 0.0005834631
## 176 0.0005810244
## 177 0.0005755621
## 178 0.0005745948
## 179 0.0005771867
## 180 0.0005760616
## 181 0.0005738939
## 182 0.0005760492
## 183 0.0005756568
## 184 0.0005760078
## 185 0.0005801453
## 186 0.0005811294
## 187 0.0005815796
## 188 0.0005794959
## 189 0.0005798678
## 190 0.0005785363
## 191 0.0005786231
## 192 0.0005778584
## 193 0.0005769357
## 194 0.0005766286
## 195 0.0005771973
## 196 0.0005792515
## 197 0.0005788342
## 198 0.0005800121
## 199 0.0005790795
## 200 0.0005807527
## 201 0.0005791041
## 202 0.0005783918
## 203 0.0005781571
## 204 0.0005812340
## 205 0.0005809667
## 206 0.0005828377
## 207 0.0005862895
## 208 0.0005848515
## 209 0.0005862627
## 210 0.0005877891
## 211 0.0005883912
## 212 0.0005876101
## 213 0.0005885022
## 214 0.0005867225
## 215 0.0005876808
## 216 0.0005872857
## 217 0.0005882544
## 218 0.0005899023
## 219 0.0005910193
## 220 0.0005901838
## 221 0.0005899732
## 222 0.0005903717
## 223 0.0005912456
## 224 0.0005909634
## 225 0.0005908327
## 226 0.0005909852
## 227 0.0005902674
## 228 0.0005909874
## 229 0.0005907499
## 230 0.0005903754
## 231 0.0005900148
## 232 0.0005902183
## 233 0.0005905081
## 234 0.0005910302
## 235 0.0005905733
## 236 0.0005909074
## 237 0.0005907941
## 238 0.0005905927
## 239 0.0005905728
## 240 0.0005905793
##    nvmax
## 17    17

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x8            x9 
##  1.956954e+00 -5.190467e-05  1.236547e-02  5.936782e-04  3.236841e-03 
##           x10           x11           x16           x17           x21 
##  1.456647e-03  2.465398e+05  8.171441e-04  1.482294e-03  1.229679e-04 
##        stat14        stat23        stat60        stat98       stat110 
## -8.357780e-04  7.089590e-04  6.399162e-04  3.329090e-03 -3.310512e-03 
##       stat114       stat144      sqrt.x18 
##  5.940544e-04  6.184670e-04  2.663657e-02

Test

# Evaluate the filtered-train backward-elimination model on the test set.
# isTRUE() avoids an error if the flag is NA (where `== TRUE` yields NA).
if (isTRUE(algo.backward.caret)) {
  test.model(model.backward, data.test,
             method = "leapBackward", subopt = NULL,
             formula = formula, feature.names = feature.names,
             label.names = label.names,
             id = id,
             draw.limits = TRUE,
             # NOTE(review): `t` is base R's transpose unless a transformation
             # object named `t` is defined upstream — verify against caller.
             transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.033   2.080   2.093   2.093   2.106   2.149 
## [1] "leapBackward  Test MSE: 0.0010547435861647"

Stepwise Selection (w/ full train)

Train

# Stepwise (both-direction) AIC selection on the FULL training set,
# starting from the null model and searching up to the full model.
if (isTRUE(algo.stepwise)) {
  t1 <- Sys.time()

  model.stepwise <- step(model.null, scope = list(upper = model.full),
                         data = data.train, direction = "both", trace = 0)
  print(summary(model.stepwise))

  t2 <- Sys.time()
  # format() preserves the difftime units (secs/mins/hours); the original
  # paste(t2 - t1) printed a bare number with no unit, which is ambiguous.
  print(paste0("Time taken for Stepwise Selection: ", format(difftime(t2, t1))))

  plot.diagnostics(model.stepwise, data.train)
}

Test

# Evaluate the stepwise-selected model on the hold-out test set.
# isTRUE() skips cleanly (instead of erroring) if the flag is NA.
if (isTRUE(algo.stepwise)) {
  test.model(model.stepwise, data.test, "Stepwise Selection")
}

Stepwise Selection (w/ filtered train)

Train

# Stepwise (both-direction) AIC selection on the FILTERED training set.
if (isTRUE(algo.stepwise)) {
  t1 <- Sys.time()

  model.stepwise2 <- step(model.null2, scope = list(upper = model.full2),
                          data = data.train2, direction = "both", trace = 0)
  print(summary(model.stepwise2))

  t2 <- Sys.time()
  # format() keeps the difftime units; paste(t2 - t1) dropped them, so the
  # reported elapsed time could silently be seconds or minutes.
  print(paste0("Time taken for Stepwise Selection: ", format(difftime(t2, t1))))

  plot.diagnostics(model.stepwise2, data.train2)
}

Test

# Evaluate the filtered-train stepwise model on the hold-out test set.
# isTRUE() skips cleanly (instead of erroring) if the flag is NA.
if (isTRUE(algo.stepwise)) {
  test.model(model.stepwise2, data.test, "Stepwise Selection (2)")
}

Stepwise Selection with CV (w/ full train)

Train

# Stepwise selection with cross-validation (caret, method "leapSeq") on the
# FULL training set; keeps the fitted model and selected feature ids.
if (isTRUE(algo.stepwise.caret)) {
  set.seed(1)  # reproducible CV folds
  returned <- train.caret.glmselect(
    formula = formula,
    data = data.train,
    method = "leapSeq",
    feature.names = feature.names
  )
  model.stepwise <- returned$model
  id <- returned$id
}
## Aggregating results
## Selecting tuning parameters
## Fitting nvmax = 7 on full training set
##     nvmax       RMSE  Rsquared        MAE       RMSESD RsquaredSD
## 1       1 0.03410610 0.1149630 0.02657577 0.0012861157 0.02115810
## 2       2 0.03328304 0.1574261 0.02582705 0.0011082962 0.02564652
## 3       3 0.03267992 0.1875344 0.02522129 0.0010042512 0.02555459
## 4       4 0.03218662 0.2115343 0.02451101 0.0010005851 0.02634272
## 5       5 0.03186632 0.2271994 0.02432100 0.0009679660 0.02729495
## 6       6 0.03185480 0.2277527 0.02432260 0.0009428484 0.02753486
## 7       7 0.03174204 0.2329557 0.02426652 0.0009365345 0.02626930
## 8       8 0.03177661 0.2313360 0.02430354 0.0009495410 0.02509086
## 9       9 0.03177342 0.2315882 0.02429654 0.0009401013 0.02649936
## 10     10 0.03174421 0.2330436 0.02428987 0.0009336330 0.02724717
## 11     11 0.03175240 0.2327719 0.02430959 0.0009426783 0.02703386
## 12     12 0.03174790 0.2330175 0.02429329 0.0009361445 0.02713821
## 13     13 0.03176566 0.2322273 0.02431153 0.0009301109 0.02725054
## 14     14 0.03174821 0.2331087 0.02430265 0.0008876305 0.02909070
## 15     15 0.03175781 0.2326158 0.02430769 0.0009302689 0.02811104
## 16     16 0.03211926 0.2149269 0.02460232 0.0016589574 0.05640281
## 17     17 0.03178911 0.2311487 0.02432604 0.0009658569 0.02635184
## 18     18 0.03177643 0.2317125 0.02431717 0.0009793686 0.02490692
## 19     19 0.03175964 0.2325262 0.02429739 0.0009456329 0.02627921
## 20     20 0.03175338 0.2328159 0.02429680 0.0009786020 0.02653806
## 21     21 0.03174981 0.2329992 0.02429780 0.0009964310 0.02533156
## 22     22 0.03176674 0.2322271 0.02429902 0.0009904555 0.02511125
## 23     23 0.03175392 0.2328407 0.02429455 0.0009774665 0.02551565
## 24     24 0.03282883 0.1803104 0.02519597 0.0023155572 0.08101563
## 25     25 0.03209541 0.2150686 0.02458775 0.0011601513 0.06094698
## 26     26 0.03241861 0.1992981 0.02482324 0.0018465750 0.07735846
## 27     27 0.03215188 0.2127444 0.02458993 0.0011141649 0.05511356
## 28     28 0.03180694 0.2305555 0.02432526 0.0009425082 0.02679435
## 29     29 0.03181979 0.2300975 0.02434237 0.0009522165 0.02721588
## 30     30 0.03211578 0.2142500 0.02463198 0.0011747173 0.05962565
## 31     31 0.03237591 0.2000485 0.02485706 0.0013293160 0.08169943
## 32     32 0.03233612 0.2022149 0.02474432 0.0014210357 0.08154324
## 33     33 0.03183478 0.2294410 0.02434773 0.0009821203 0.02653906
## 34     34 0.03184403 0.2290906 0.02437048 0.0009826630 0.02640569
## 35     35 0.03250160 0.1957711 0.02494104 0.0016536733 0.07164746
## 36     36 0.03221350 0.2122502 0.02465580 0.0019058140 0.04598983
## 37     37 0.03190358 0.2264476 0.02440120 0.0009828211 0.02468246
## 38     38 0.03189749 0.2267378 0.02440712 0.0009965136 0.02346400
## 39     39 0.03212960 0.2141372 0.02456237 0.0012831713 0.05657506
## 40     40 0.03188959 0.2271720 0.02441256 0.0009994361 0.02341634
## 41     41 0.03216293 0.2123877 0.02463264 0.0012073624 0.06311081
## 42     42 0.03248341 0.1980727 0.02484670 0.0020466349 0.06508109
## 43     43 0.03193239 0.2253818 0.02444486 0.0010081867 0.02329288
## 44     44 0.03229425 0.2067995 0.02473590 0.0011282630 0.05080137
## 45     45 0.03226414 0.2091858 0.02474148 0.0016519816 0.05380955
## 46     46 0.03193405 0.2254265 0.02445603 0.0009882143 0.02454566
## 47     47 0.03193352 0.2254485 0.02445708 0.0009984702 0.02437626
## 48     48 0.03256762 0.1943422 0.02495475 0.0019731802 0.07120842
## 49     49 0.03223562 0.2100239 0.02472631 0.0012259703 0.05111596
## 50     50 0.03195650 0.2245477 0.02447378 0.0009856750 0.02497266
## 51     51 0.03196159 0.2243297 0.02448205 0.0009940737 0.02482403
## 52     52 0.03196892 0.2240017 0.02447639 0.0010003420 0.02481363
## 53     53 0.03257242 0.1935117 0.02503046 0.0017588808 0.06676153
## 54     54 0.03259778 0.1910291 0.02497816 0.0012915290 0.07599770
## 55     55 0.03225370 0.2087661 0.02473797 0.0010950232 0.05601349
## 56     56 0.03229253 0.2091512 0.02472056 0.0019238341 0.04665426
## 57     57 0.03259075 0.1934163 0.02499611 0.0018924363 0.06449884
## 58     58 0.03196489 0.2243476 0.02447793 0.0010191719 0.02536728
## 59     59 0.03227592 0.2079756 0.02475775 0.0010783505 0.05623206
## 60     60 0.03229540 0.2074869 0.02478128 0.0012692020 0.05501859
## 61     61 0.03199268 0.2231779 0.02449705 0.0009959766 0.02529087
## 62     62 0.03219441 0.2117503 0.02464888 0.0012774142 0.05750873
## 63     63 0.03298815 0.1739322 0.02532744 0.0022636622 0.07631895
## 64     64 0.03289132 0.1768382 0.02525706 0.0013835740 0.07592171
## 65     65 0.03252690 0.1952872 0.02492907 0.0014500114 0.07353741
## 66     66 0.03283150 0.1792645 0.02521179 0.0018147869 0.08321847
## 67     67 0.03200293 0.2228634 0.02450889 0.0009897985 0.02631332
## 68     68 0.03333049 0.1555654 0.02560361 0.0019689929 0.08672999
## 69     69 0.03316138 0.1639111 0.02544118 0.0020431124 0.08380379
## 70     70 0.03274195 0.1868649 0.02514319 0.0020458536 0.07073018
## 71     71 0.03254776 0.1934834 0.02497525 0.0013035213 0.07876418
## 72     72 0.03259936 0.1912174 0.02504414 0.0011373458 0.07200664
## 73     73 0.03232360 0.2071354 0.02473704 0.0013259988 0.04911588
## 74     74 0.03231690 0.2064387 0.02478512 0.0011828164 0.05812581
## 75     75 0.03323928 0.1616505 0.02546019 0.0023840683 0.07914280
## 76     76 0.03237452 0.2044338 0.02481586 0.0012769940 0.05676240
## 77     77 0.03313903 0.1678332 0.02538913 0.0022451230 0.07294021
## 78     78 0.03238459 0.2047211 0.02484257 0.0016077549 0.05345208
## 79     79 0.03258366 0.1931998 0.02496386 0.0014389802 0.07300195
## 80     80 0.03207236 0.2201539 0.02455813 0.0009834858 0.02803806
## 81     81 0.03235293 0.2059608 0.02476558 0.0013093083 0.04885907
## 82     82 0.03239624 0.2035417 0.02482618 0.0012849801 0.05715164
## 83     83 0.03245366 0.2014963 0.02485597 0.0016493830 0.05805375
## 84     84 0.03239949 0.2041724 0.02485379 0.0015792852 0.05188819
## 85     85 0.03228875 0.2080722 0.02468667 0.0012639879 0.05703009
## 86     86 0.03237868 0.2039564 0.02482511 0.0010080674 0.05412163
## 87     87 0.03210356 0.2188564 0.02457060 0.0009851321 0.02785877
## 88     88 0.03246338 0.2020755 0.02483721 0.0019546927 0.05074489
## 89     89 0.03211609 0.2182976 0.02458036 0.0009808036 0.02763042
## 90     90 0.03210117 0.2190102 0.02456896 0.0009863785 0.02825073
## 91     91 0.03304188 0.1724487 0.02531407 0.0021383158 0.07134435
## 92     92 0.03246262 0.2005750 0.02484400 0.0011241658 0.05378501
## 93     93 0.03257410 0.1928503 0.02495444 0.0013866349 0.07476945
## 94     94 0.03247654 0.2007705 0.02487414 0.0016558227 0.05873577
## 95     95 0.03210040 0.2192415 0.02456970 0.0009769006 0.02855142
## 96     96 0.03276104 0.1864404 0.02512267 0.0019211575 0.06573290
## 97     97 0.03274925 0.1877231 0.02506480 0.0021247668 0.06233909
## 98     98 0.03240481 0.2034406 0.02484317 0.0012521020 0.05650603
## 99     99 0.03211732 0.2185770 0.02458936 0.0009655235 0.02837108
## 100   100 0.03279035 0.1847521 0.02516988 0.0015841894 0.06490836
## 101   101 0.03276582 0.1858219 0.02511647 0.0017499201 0.07979457
## 102   102 0.03233659 0.2064519 0.02472461 0.0012848699 0.05944033
## 103   103 0.03242725 0.2022871 0.02486848 0.0009823160 0.05536879
## 104   104 0.03274622 0.1870312 0.02517483 0.0016946199 0.06783572
## 105   105 0.03232916 0.2068404 0.02472680 0.0012845203 0.05932755
## 106   106 0.03304363 0.1703387 0.02540484 0.0016136926 0.08098415
## 107   107 0.03239277 0.2047053 0.02479968 0.0013082155 0.04906237
## 108   108 0.03276245 0.1866209 0.02510418 0.0018275654 0.06796938
## 109   109 0.03244999 0.2014355 0.02488959 0.0010011122 0.05543930
## 110   110 0.03214482 0.2176040 0.02460990 0.0009870561 0.02818638
## 111   111 0.03272864 0.1875685 0.02504358 0.0018351050 0.07649956
## 112   112 0.03269290 0.1891129 0.02506228 0.0014507887 0.07353850
## 113   113 0.03297536 0.1736356 0.02527700 0.0018556467 0.08556914
## 114   114 0.03241320 0.2034111 0.02484082 0.0012132860 0.06425824
## 115   115 0.03305682 0.1704426 0.02539493 0.0013465786 0.07471932
## 116   116 0.03251320 0.1996044 0.02490583 0.0016703408 0.05915495
## 117   117 0.03312782 0.1698228 0.02543830 0.0023680359 0.06921822
## 118   118 0.03242369 0.2029767 0.02484708 0.0012025350 0.06384521
## 119   119 0.03243619 0.2028915 0.02484124 0.0013332787 0.05026899
## 120   120 0.03242022 0.2043535 0.02485374 0.0014404317 0.04469422
## 121   121 0.03252591 0.1968981 0.02494054 0.0012469272 0.05854280
## 122   122 0.03275149 0.1869262 0.02512400 0.0009048483 0.05692935
## 123   123 0.03275282 0.1878138 0.02512222 0.0014183162 0.05465716
## 124   124 0.03254242 0.1961844 0.02495684 0.0012560789 0.05897256
## 125   125 0.03263963 0.1932637 0.02505210 0.0015572354 0.05175062
## 126   126 0.03273151 0.1893005 0.02510157 0.0017938955 0.05807539
## 127   127 0.03233508 0.2071176 0.02473006 0.0012017460 0.05157564
## 128   128 0.03243324 0.2029735 0.02486546 0.0009228015 0.04664238
## 129   129 0.03273711 0.1880768 0.02509641 0.0011570328 0.06640368
## 130   130 0.03233926 0.2070006 0.02474230 0.0011992653 0.05141405
## 131   131 0.03288517 0.1798324 0.02526493 0.0011435016 0.06870453
## 132   132 0.03234856 0.2080943 0.02481654 0.0012697575 0.03542431
## 133   133 0.03278756 0.1856771 0.02511570 0.0016008352 0.05637292
## 134   134 0.03218274 0.2162132 0.02464776 0.0010068218 0.02777427
## 135   135 0.03279191 0.1848163 0.02512615 0.0010797685 0.05653861
## 136   136 0.03239629 0.2052970 0.02480013 0.0008914681 0.04033008
## 137   137 0.03252216 0.1988631 0.02493976 0.0013069565 0.05249076
## 138   138 0.03217889 0.2164334 0.02464307 0.0010136988 0.02738787
## 139   139 0.03217696 0.2164877 0.02463722 0.0010174389 0.02705326
## 140   140 0.03267881 0.1893920 0.02504305 0.0013274950 0.06095029
## 141   141 0.03218443 0.2161928 0.02463989 0.0010208299 0.02693775
## 142   142 0.03260225 0.1958824 0.02507148 0.0016185036 0.04216231
## 143   143 0.03264340 0.1930923 0.02501013 0.0010986146 0.04339204
## 144   144 0.03220393 0.2153952 0.02465968 0.0010136477 0.02700242
## 145   145 0.03237407 0.2062210 0.02476093 0.0012205137 0.05012499
## 146   146 0.03264253 0.1928145 0.02499230 0.0010059415 0.05427841
## 147   147 0.03246579 0.2021898 0.02486344 0.0009881039 0.03905779
## 148   148 0.03239417 0.2058036 0.02480383 0.0011742174 0.03727277
## 149   149 0.03278908 0.1867535 0.02515068 0.0018708417 0.05568879
## 150   150 0.03236611 0.2062796 0.02481525 0.0010909673 0.04337332
## 151   151 0.03286938 0.1814134 0.02522398 0.0007685191 0.04808198
## 152   152 0.03240807 0.2051952 0.02481308 0.0011819189 0.03726666
## 153   153 0.03222093 0.2146720 0.02465821 0.0010361823 0.02691990
## 154   154 0.03284688 0.1831146 0.02518447 0.0012298273 0.05565941
## 155   155 0.03222568 0.2144840 0.02466137 0.0010258089 0.02704617
## 156   156 0.03346262 0.1517340 0.02574161 0.0013816858 0.05763817
## 157   157 0.03223805 0.2139676 0.02466573 0.0010311105 0.02721483
## 158   158 0.03249287 0.2017395 0.02489567 0.0014418380 0.04438291
## 159   159 0.03242227 0.2043929 0.02479954 0.0011134152 0.05030422
## 160   160 0.03267059 0.1923989 0.02500186 0.0016097881 0.06097249
## 161   161 0.03272187 0.1912690 0.02510586 0.0018867339 0.04788721
## 162   162 0.03222553 0.2145163 0.02465750 0.0010336107 0.02704354
## 163   163 0.03307779 0.1716100 0.02535995 0.0016500439 0.06171150
## 164   164 0.03221908 0.2147781 0.02465538 0.0010323753 0.02687193
## 165   165 0.03266885 0.1925422 0.02502351 0.0014827960 0.05985727
## 166   166 0.03239995 0.2053253 0.02485827 0.0010900803 0.03932986
## 167   167 0.03260356 0.1951139 0.02491845 0.0013365792 0.06649378
## 168   168 0.03257180 0.1964623 0.02502186 0.0013491707 0.04867899
## 169   169 0.03248084 0.2023802 0.02489528 0.0014612445 0.04515946
## 170   170 0.03259162 0.1960716 0.02499438 0.0013587709 0.05423047
## 171   171 0.03222250 0.2147113 0.02466286 0.0010283200 0.02705396
## 172   172 0.03222393 0.2146528 0.02466829 0.0010252256 0.02699643
## 173   173 0.03245234 0.2041939 0.02487745 0.0016097430 0.03542971
## 174   174 0.03298623 0.1772855 0.02528479 0.0012388986 0.05321625
## 175   175 0.03222688 0.2145505 0.02466627 0.0010305452 0.02698883
## 176   176 0.03223017 0.2144153 0.02467052 0.0010336480 0.02679063
## 177   177 0.03249353 0.2012871 0.02488427 0.0009989004 0.03806167
## 178   178 0.03250905 0.2012687 0.02492783 0.0014939364 0.04621292
## 179   179 0.03242742 0.2044618 0.02483726 0.0011979018 0.03784734
## 180   180 0.03260993 0.1955626 0.02502549 0.0014219431 0.04298846
## 181   181 0.03270422 0.1913350 0.02507046 0.0015211676 0.06083643
## 182   182 0.03242496 0.2044994 0.02481382 0.0011148913 0.05048274
## 183   183 0.03244749 0.2034140 0.02482378 0.0009295843 0.04079385
## 184   184 0.03242398 0.2045284 0.02481707 0.0011187735 0.05053383
## 185   185 0.03222617 0.2145783 0.02467325 0.0010385848 0.02728563
## 186   186 0.03270729 0.1913902 0.02505777 0.0016844958 0.06387269
## 187   187 0.03289159 0.1822055 0.02521764 0.0016019211 0.04768519
## 188   188 0.03271595 0.1910402 0.02506996 0.0016916118 0.06414302
## 189   189 0.03222725 0.2145114 0.02467832 0.0010422847 0.02720718
## 190   190 0.03222934 0.2144297 0.02468033 0.0010440887 0.02729145
## 191   191 0.03222885 0.2144459 0.02467938 0.0010426642 0.02733943
## 192   192 0.03222873 0.2144454 0.02467910 0.0010437582 0.02739262
## 193   193 0.03223054 0.2143701 0.02468088 0.0010438126 0.02745762
## 194   194 0.03269836 0.1912423 0.02502538 0.0012588830 0.05785350
## 195   195 0.03243547 0.2041273 0.02484590 0.0012201843 0.03905888
## 196   196 0.03252376 0.2007439 0.02494787 0.0015375568 0.04803153
## 197   197 0.03271104 0.1903672 0.02504747 0.0008196595 0.04539275
## 198   198 0.03241980 0.2045562 0.02489094 0.0011153328 0.04001522
## 199   199 0.03244998 0.2033016 0.02483737 0.0009445484 0.04134277
## 200   200 0.03223011 0.2143799 0.02468214 0.0010477634 0.02755269
## 201   201 0.03241837 0.2046066 0.02488992 0.0011188097 0.04009894
## 202   202 0.03223170 0.2143143 0.02468308 0.0010448119 0.02759767
## 203   203 0.03249471 0.2012975 0.02489545 0.0010046360 0.03826399
## 204   204 0.03223128 0.2143422 0.02468494 0.0010437813 0.02748991
## 205   205 0.03223160 0.2143291 0.02468513 0.0010444590 0.02757676
## 206   206 0.03223155 0.2143227 0.02468628 0.0010464766 0.02756180
## 207   207 0.03245677 0.2030212 0.02484584 0.0009477787 0.04166573
## 208   208 0.03244545 0.2040013 0.02482601 0.0013569545 0.05499953
## 209   209 0.03223383 0.2142319 0.02468739 0.0010453155 0.02759553
## 210   210 0.03223320 0.2142497 0.02468694 0.0010465787 0.02753182
## 211   211 0.03269911 0.1925198 0.02506673 0.0018671964 0.05860282
## 212   212 0.03265499 0.1930796 0.02500867 0.0010830323 0.04642058
## 213   213 0.03223343 0.2142513 0.02468865 0.0010461559 0.02751035
## 214   214 0.03245071 0.2039037 0.02483228 0.0013620020 0.05495304
## 215   215 0.03223403 0.2142238 0.02468962 0.0010456569 0.02733998
## 216   216 0.03223412 0.2142200 0.02469021 0.0010437127 0.02725333
## 217   217 0.03253702 0.2002844 0.02495545 0.0015561583 0.04835077
## 218   218 0.03271720 0.1918776 0.02508091 0.0018928658 0.05962750
## 219   219 0.03243091 0.2040993 0.02491197 0.0011267901 0.04066818
## 220   220 0.03242743 0.2040563 0.02488889 0.0011401401 0.04617853
## 221   221 0.03264785 0.1939217 0.02497485 0.0012311441 0.05891513
## 222   222 0.03223526 0.2141694 0.02469290 0.0010456283 0.02720763
## 223   223 0.03243332 0.2040111 0.02491421 0.0011295493 0.04078651
## 224   224 0.03245215 0.2036891 0.02491210 0.0014086216 0.04099450
## 225   225 0.03223502 0.2141819 0.02469298 0.0010445208 0.02725529
## 226   226 0.03243786 0.2042074 0.02484177 0.0011371523 0.05092127
## 227   227 0.03223413 0.2142199 0.02469274 0.0010449003 0.02723661
## 228   228 0.03249782 0.2022641 0.02493262 0.0017245235 0.03937861
## 229   229 0.03265988 0.1937400 0.02501446 0.0014782201 0.05951571
## 230   230 0.03250115 0.2012147 0.02490329 0.0010145338 0.03807589
## 231   231 0.03294674 0.1798964 0.02532156 0.0016182348 0.05546818
## 232   232 0.03244959 0.2032430 0.02490565 0.0011714890 0.04824992
## 233   233 0.03285569 0.1833735 0.02530027 0.0015113833 0.05125967
## 234   234 0.03223392 0.2142269 0.02469274 0.0010457886 0.02725864
## 235   235 0.03244302 0.2037073 0.02484680 0.0009360417 0.03961460
## 236   236 0.03317742 0.1682341 0.02554772 0.0014218553 0.05160845
## 237   237 0.03313482 0.1698927 0.02547479 0.0014243469 0.05975247
## 238   238 0.03324611 0.1660447 0.02562257 0.0018436969 0.05124826
## 239   239 0.03312973 0.1708756 0.02553320 0.0017869280 0.05201908
## 240   240 0.03223413 0.2142186 0.02469321 0.0010461107 0.02727501
##            MAESD
## 1   0.0006039987
## 2   0.0005560515
## 3   0.0005161705
## 4   0.0005030416
## 5   0.0004516171
## 6   0.0004274403
## 7   0.0004386333
## 8   0.0004116784
## 9   0.0004027648
## 10  0.0003804190
## 11  0.0003827175
## 12  0.0003714371
## 13  0.0003718175
## 14  0.0003455862
## 15  0.0003658994
## 16  0.0011127009
## 17  0.0003957368
## 18  0.0004141335
## 19  0.0003935137
## 20  0.0004296221
## 21  0.0004388117
## 22  0.0004236905
## 23  0.0004130209
## 24  0.0015775448
## 25  0.0008449812
## 26  0.0013036300
## 27  0.0007547808
## 28  0.0004085353
## 29  0.0004149682
## 30  0.0008792404
## 31  0.0011119828
## 32  0.0010121724
## 33  0.0004354754
## 34  0.0004395554
## 35  0.0012482211
## 36  0.0010576188
## 37  0.0004392410
## 38  0.0004605232
## 39  0.0007178040
## 40  0.0004695298
## 41  0.0008813114
## 42  0.0011705339
## 43  0.0004917127
## 44  0.0007111468
## 45  0.0011818184
## 46  0.0004753556
## 47  0.0004734905
## 48  0.0012402559
## 49  0.0008601080
## 50  0.0004511619
## 51  0.0004427788
## 52  0.0004748062
## 53  0.0013011698
## 54  0.0010152414
## 55  0.0007785009
## 56  0.0010627563
## 57  0.0011516998
## 58  0.0004619953
## 59  0.0007652451
## 60  0.0008966005
## 61  0.0004482238
## 62  0.0007188133
## 63  0.0014967725
## 64  0.0011747955
## 65  0.0011522404
## 66  0.0013298873
## 67  0.0004425742
## 68  0.0015170473
## 69  0.0014324750
## 70  0.0014989525
## 71  0.0010586854
## 72  0.0009230817
## 73  0.0009113305
## 74  0.0008430655
## 75  0.0015875591
## 76  0.0009212945
## 77  0.0014489533
## 78  0.0011305868
## 79  0.0011414438
## 80  0.0004694116
## 81  0.0009021172
## 82  0.0009181138
## 83  0.0011499452
## 84  0.0011156066
## 85  0.0007122391
## 86  0.0007046124
## 87  0.0004611376
## 88  0.0010679946
## 89  0.0004528057
## 90  0.0004514514
## 91  0.0013852193
## 92  0.0007365208
## 93  0.0009726072
## 94  0.0011484765
## 95  0.0004588348
## 96  0.0011437201
## 97  0.0013033810
## 98  0.0009003507
## 99  0.0004546460
## 100 0.0011675947
## 101 0.0013553714
## 102 0.0007464478
## 103 0.0007337989
## 104 0.0012938032
## 105 0.0007421953
## 106 0.0012975227
## 107 0.0008976272
## 108 0.0013508162
## 109 0.0007339981
## 110 0.0004534559
## 111 0.0012652426
## 112 0.0011603051
## 113 0.0013499275
## 114 0.0009012387
## 115 0.0011555386
## 116 0.0011608498
## 117 0.0015535712
## 118 0.0008889101
## 119 0.0009281055
## 120 0.0009074940
## 121 0.0008387336
## 122 0.0007123429
## 123 0.0009610969
## 124 0.0008405230
## 125 0.0011075471
## 126 0.0012151338
## 127 0.0006195299
## 128 0.0005963730
## 129 0.0008949365
## 130 0.0006240965
## 131 0.0010349946
## 132 0.0007569814
## 133 0.0009219061
## 134 0.0004541498
## 135 0.0008053882
## 136 0.0004316646
## 137 0.0008587860
## 138 0.0004578164
## 139 0.0004576245
## 140 0.0009058995
## 141 0.0004591899
## 142 0.0010163552
## 143 0.0007295024
## 144 0.0004475773
## 145 0.0006326773
## 146 0.0006655580
## 147 0.0005395318
## 148 0.0007327373
## 149 0.0011238014
## 150 0.0006657392
## 151 0.0005760039
## 152 0.0007357123
## 153 0.0004620058
## 154 0.0008663381
## 155 0.0004548887
## 156 0.0010976095
## 157 0.0004515599
## 158 0.0009635948
## 159 0.0006603004
## 160 0.0010652083
## 161 0.0012139585
## 162 0.0004519747
## 163 0.0010039189
## 164 0.0004484753
## 165 0.0010671888
## 166 0.0007069726
## 167 0.0008303874
## 168 0.0009357179
## 169 0.0009983007
## 170 0.0009343876
## 171 0.0004493639
## 172 0.0004508349
## 173 0.0008982117
## 174 0.0009285355
## 175 0.0004531830
## 176 0.0004528845
## 177 0.0005359231
## 178 0.0010375023
## 179 0.0007517797
## 180 0.0009799448
## 181 0.0011209038
## 182 0.0006747597
## 183 0.0004342894
## 184 0.0006782662
## 185 0.0004668034
## 186 0.0011673016
## 187 0.0010038496
## 188 0.0011750050
## 189 0.0004674643
## 190 0.0004709530
## 191 0.0004718856
## 192 0.0004708967
## 193 0.0004726529
## 194 0.0007447920
## 195 0.0007688918
## 196 0.0010879613
## 197 0.0004385228
## 198 0.0007285305
## 199 0.0004526595
## 200 0.0004763508
## 201 0.0007329246
## 202 0.0004731103
## 203 0.0005422353
## 204 0.0004715263
## 205 0.0004730446
## 206 0.0004730972
## 207 0.0004550047
## 208 0.0007670386
## 209 0.0004727704
## 210 0.0004721833
## 211 0.0011141488
## 212 0.0007042798
## 213 0.0004703265
## 214 0.0007714124
## 215 0.0004682378
## 216 0.0004668158
## 217 0.0010797776
## 218 0.0011290609
## 219 0.0007583249
## 220 0.0007719019
## 221 0.0007138473
## 222 0.0004665774
## 223 0.0007577915
## 224 0.0009035060
## 225 0.0004664618
## 226 0.0006960714
## 227 0.0004670497
## 228 0.0009809959
## 229 0.0009751296
## 230 0.0005431403
## 231 0.0011912097
## 232 0.0008067244
## 233 0.0011281128
## 234 0.0004674003
## 235 0.0004394913
## 236 0.0011343020
## 237 0.0011578774
## 238 0.0012441954
## 239 0.0011540559
## 240 0.0004675061
##   nvmax
## 7     7

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.
## Coefficients of final model:

##   (Intercept)            x4            x7            x9           x17 
##  2.006915e+00 -4.541413e-05  1.108905e-02  3.535570e-03  1.516178e-03 
##        stat98       stat110      sqrt.x18 
##  3.579154e-03 -3.275780e-03  2.665541e-02

Test

# Evaluate the CV-selected leapSeq model on the held-out test set.
if (algo.stepwise.caret == TRUE){
  # `id` is the selected-subset index returned by train.caret.glmselect above.
  # NOTE(review): `transformation = t` -- unless `t` is assigned earlier in this
  # file, this passes base R's transpose function t(); a transformation
  # flag/object was presumably intended. Confirm against test.model()'s signature.
  test.model(model.stepwise, data.test
             ,method = 'leapSeq',subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,id = id
             ,draw.limits = TRUE, transformation = t)
  
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.041   2.084   2.097   2.096   2.109   2.142 
## [1] "leapSeq  Test MSE: 0.00103851731581386"

Stepwise Selection with CV (w/ filtered train)

Train

Test

LASSO (w/ full train)

Train

if (isTRUE(algo.LASSO)) {
  # Build plain numeric matrices for glmnet from the full training set.
  # (model.matrix() would also work and additionally expands factors to
  # dummy variables and handles interactions.)
  x     <- as.matrix(data.train[, feature.names])
  y     <- data.train[, label.names]
  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Log-spaced penalty grid from 10^10 down to 10^-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  # alpha = 1 selects the pure LASSO penalty.
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # Cross-validate (on glmnet's own default lambda path) to pick the penalty.
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  bestlambda <- cv.out$lambda.min  # optimal penalty; can also be read off the CV plot

  print(coef(model.LASSO, s = bestlambda))
}

Test

if(algo.LASSO == TRUE){
  # Predict on the held-out test set at the CV-chosen penalty.
  lasso.pred = predict(model.LASSO, s = bestlambda, newx = xtest)

  # Test-set mean squared error. The original label said "RMSE" although the
  # value is the unrooted MSE (as the variable name indicates, and matching the
  # "Test MSE" labeling used for the leapSeq model above). Report both correctly.
  testMSE_LASSO = mean((ytest - lasso.pred)^2)
  print(paste("LASSO Test MSE: ", testMSE_LASSO, sep = ""))
  print(paste("LASSO Test RMSE: ", sqrt(testMSE_LASSO), sep = ""))

  # Observed vs predicted scatter for a quick visual check.
  plot(ytest, lasso.pred)
}

LASSO (w/ filtered train)

Train

if (isTRUE(algo.LASSO)) {
  # Build plain numeric matrices for glmnet from the FILTERED training set.
  # (model.matrix() would also work and additionally expands factors to
  # dummy variables and handles interactions.)
  x     <- as.matrix(data.train2[, feature.names])
  y     <- data.train2[, label.names]
  xtest <- as.matrix(data.test[, feature.names])
  ytest <- data.test[, label.names]

  # Log-spaced penalty grid from 10^10 down to 10^-2.
  grid <- 10^seq(10, -2, length = 100)

  set.seed(1)
  # alpha = 1 selects the pure LASSO penalty.
  model.LASSO <- glmnet(x, y, alpha = 1, lambda = grid)

  # Cross-validate (on glmnet's own default lambda path) to pick the penalty.
  cv.out <- cv.glmnet(x, y, alpha = 1)
  plot(cv.out)
  bestlambda <- cv.out$lambda.min  # optimal penalty; can also be read off the CV plot

  print(coef(model.LASSO, s = bestlambda))
}

Test

if(algo.LASSO == TRUE){
  # Predict on the held-out test set at the CV-chosen penalty
  # (model trained on the filtered training set).
  lasso.pred = predict(model.LASSO, s = bestlambda, newx = xtest)

  # Test-set mean squared error. The original label said "RMSE" although the
  # value is the unrooted MSE (as the variable name indicates, and matching the
  # "Test MSE" labeling used for the leapSeq model above). Report both correctly.
  testMSE_LASSO = mean((ytest - lasso.pred)^2)
  print(paste("LASSO Test MSE: ", testMSE_LASSO, sep = ""))
  print(paste("LASSO Test RMSE: ", sqrt(testMSE_LASSO), sep = ""))

  # Observed vs predicted scatter for a quick visual check.
  plot(ytest, lasso.pred)
}

LASSO with CV (w/ full train)

Train

# Cross-validated LASSO (caret method "glmnet", alpha held at 1 via subopt)
# on the full training set.
if (isTRUE(algo.LASSO.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  fit <- train.caret.glmselect(
    formula       = formula,
    data          = data.train,
    method        = "glmnet",
    subopt        = "LASSO",
    feature.names = feature.names
  )
  model.LASSO.caret <- fit$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.01 on full training set
## glmnet 
## 
## 6002 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE        Rsquared  MAE       
##   0.01000000  0.03553800  0.114963  0.02750353
##   0.01047616  0.03567477  0.114963  0.02759932
##   0.01097499  0.03582429  0.114963  0.02770616
##   0.01149757  0.03598768  0.114963  0.02782456
##   0.01204504  0.03616167  0.109576  0.02795007
##   0.01261857  0.03623036       NaN  0.02799885
##   0.01321941  0.03623036       NaN  0.02799885
##   0.01384886  0.03623036       NaN  0.02799885
##   0.01450829  0.03623036       NaN  0.02799885
##   0.01519911  0.03623036       NaN  0.02799885
##   0.01592283  0.03623036       NaN  0.02799885
##   0.01668101  0.03623036       NaN  0.02799885
##   0.01747528  0.03623036       NaN  0.02799885
##   0.01830738  0.03623036       NaN  0.02799885
##   0.01917910  0.03623036       NaN  0.02799885
##   0.02009233  0.03623036       NaN  0.02799885
##   0.02104904  0.03623036       NaN  0.02799885
##   0.02205131  0.03623036       NaN  0.02799885
##   0.02310130  0.03623036       NaN  0.02799885
##   0.02420128  0.03623036       NaN  0.02799885
##   0.02535364  0.03623036       NaN  0.02799885
##   0.02656088  0.03623036       NaN  0.02799885
##   0.02782559  0.03623036       NaN  0.02799885
##   0.02915053  0.03623036       NaN  0.02799885
##   0.03053856  0.03623036       NaN  0.02799885
##   0.03199267  0.03623036       NaN  0.02799885
##   0.03351603  0.03623036       NaN  0.02799885
##   0.03511192  0.03623036       NaN  0.02799885
##   0.03678380  0.03623036       NaN  0.02799885
##   0.03853529  0.03623036       NaN  0.02799885
##   0.04037017  0.03623036       NaN  0.02799885
##   0.04229243  0.03623036       NaN  0.02799885
##   0.04430621  0.03623036       NaN  0.02799885
##   0.04641589  0.03623036       NaN  0.02799885
##   0.04862602  0.03623036       NaN  0.02799885
##   0.05094138  0.03623036       NaN  0.02799885
##   0.05336699  0.03623036       NaN  0.02799885
##   0.05590810  0.03623036       NaN  0.02799885
##   0.05857021  0.03623036       NaN  0.02799885
##   0.06135907  0.03623036       NaN  0.02799885
##   0.06428073  0.03623036       NaN  0.02799885
##   0.06734151  0.03623036       NaN  0.02799885
##   0.07054802  0.03623036       NaN  0.02799885
##   0.07390722  0.03623036       NaN  0.02799885
##   0.07742637  0.03623036       NaN  0.02799885
##   0.08111308  0.03623036       NaN  0.02799885
##   0.08497534  0.03623036       NaN  0.02799885
##   0.08902151  0.03623036       NaN  0.02799885
##   0.09326033  0.03623036       NaN  0.02799885
##   0.09770100  0.03623036       NaN  0.02799885
##   0.10235310  0.03623036       NaN  0.02799885
##   0.10722672  0.03623036       NaN  0.02799885
##   0.11233240  0.03623036       NaN  0.02799885
##   0.11768120  0.03623036       NaN  0.02799885
##   0.12328467  0.03623036       NaN  0.02799885
##   0.12915497  0.03623036       NaN  0.02799885
##   0.13530478  0.03623036       NaN  0.02799885
##   0.14174742  0.03623036       NaN  0.02799885
##   0.14849683  0.03623036       NaN  0.02799885
##   0.15556761  0.03623036       NaN  0.02799885
##   0.16297508  0.03623036       NaN  0.02799885
##   0.17073526  0.03623036       NaN  0.02799885
##   0.17886495  0.03623036       NaN  0.02799885
##   0.18738174  0.03623036       NaN  0.02799885
##   0.19630407  0.03623036       NaN  0.02799885
##   0.20565123  0.03623036       NaN  0.02799885
##   0.21544347  0.03623036       NaN  0.02799885
##   0.22570197  0.03623036       NaN  0.02799885
##   0.23644894  0.03623036       NaN  0.02799885
##   0.24770764  0.03623036       NaN  0.02799885
##   0.25950242  0.03623036       NaN  0.02799885
##   0.27185882  0.03623036       NaN  0.02799885
##   0.28480359  0.03623036       NaN  0.02799885
##   0.29836472  0.03623036       NaN  0.02799885
##   0.31257158  0.03623036       NaN  0.02799885
##   0.32745492  0.03623036       NaN  0.02799885
##   0.34304693  0.03623036       NaN  0.02799885
##   0.35938137  0.03623036       NaN  0.02799885
##   0.37649358  0.03623036       NaN  0.02799885
##   0.39442061  0.03623036       NaN  0.02799885
##   0.41320124  0.03623036       NaN  0.02799885
##   0.43287613  0.03623036       NaN  0.02799885
##   0.45348785  0.03623036       NaN  0.02799885
##   0.47508102  0.03623036       NaN  0.02799885
##   0.49770236  0.03623036       NaN  0.02799885
##   0.52140083  0.03623036       NaN  0.02799885
##   0.54622772  0.03623036       NaN  0.02799885
##   0.57223677  0.03623036       NaN  0.02799885
##   0.59948425  0.03623036       NaN  0.02799885
##   0.62802914  0.03623036       NaN  0.02799885
##   0.65793322  0.03623036       NaN  0.02799885
##   0.68926121  0.03623036       NaN  0.02799885
##   0.72208090  0.03623036       NaN  0.02799885
##   0.75646333  0.03623036       NaN  0.02799885
##   0.79248290  0.03623036       NaN  0.02799885
##   0.83021757  0.03623036       NaN  0.02799885
##   0.86974900  0.03623036       NaN  0.02799885
##   0.91116276  0.03623036       NaN  0.02799885
##   0.95454846  0.03623036       NaN  0.02799885
##   1.00000000  0.03623036       NaN  0.02799885
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.01.

##   alpha lambda
## 1     1   0.01
##     alpha     lambda       RMSE Rsquared        MAE      RMSESD RsquaredSD
## 1       1 0.01000000 0.03553800 0.114963 0.02750353 0.001405490 0.02115810
## 2       1 0.01047616 0.03567477 0.114963 0.02759932 0.001410508 0.02115810
## 3       1 0.01097499 0.03582429 0.114963 0.02770616 0.001415695 0.02115810
## 4       1 0.01149757 0.03598768 0.114963 0.02782456 0.001421054 0.02115810
## 5       1 0.01204504 0.03616167 0.109576 0.02795007 0.001422758 0.01330986
## 6       1 0.01261857 0.03623036      NaN 0.02799885 0.001400273         NA
## 7       1 0.01321941 0.03623036      NaN 0.02799885 0.001400273         NA
## 8       1 0.01384886 0.03623036      NaN 0.02799885 0.001400273         NA
## 9       1 0.01450829 0.03623036      NaN 0.02799885 0.001400273         NA
## 10      1 0.01519911 0.03623036      NaN 0.02799885 0.001400273         NA
## 11      1 0.01592283 0.03623036      NaN 0.02799885 0.001400273         NA
## 12      1 0.01668101 0.03623036      NaN 0.02799885 0.001400273         NA
## 13      1 0.01747528 0.03623036      NaN 0.02799885 0.001400273         NA
## 14      1 0.01830738 0.03623036      NaN 0.02799885 0.001400273         NA
## 15      1 0.01917910 0.03623036      NaN 0.02799885 0.001400273         NA
## 16      1 0.02009233 0.03623036      NaN 0.02799885 0.001400273         NA
## 17      1 0.02104904 0.03623036      NaN 0.02799885 0.001400273         NA
## 18      1 0.02205131 0.03623036      NaN 0.02799885 0.001400273         NA
## 19      1 0.02310130 0.03623036      NaN 0.02799885 0.001400273         NA
## 20      1 0.02420128 0.03623036      NaN 0.02799885 0.001400273         NA
## 21      1 0.02535364 0.03623036      NaN 0.02799885 0.001400273         NA
## 22      1 0.02656088 0.03623036      NaN 0.02799885 0.001400273         NA
## 23      1 0.02782559 0.03623036      NaN 0.02799885 0.001400273         NA
## 24      1 0.02915053 0.03623036      NaN 0.02799885 0.001400273         NA
## 25      1 0.03053856 0.03623036      NaN 0.02799885 0.001400273         NA
## 26      1 0.03199267 0.03623036      NaN 0.02799885 0.001400273         NA
## 27      1 0.03351603 0.03623036      NaN 0.02799885 0.001400273         NA
## 28      1 0.03511192 0.03623036      NaN 0.02799885 0.001400273         NA
## 29      1 0.03678380 0.03623036      NaN 0.02799885 0.001400273         NA
## 30      1 0.03853529 0.03623036      NaN 0.02799885 0.001400273         NA
## 31      1 0.04037017 0.03623036      NaN 0.02799885 0.001400273         NA
## 32      1 0.04229243 0.03623036      NaN 0.02799885 0.001400273         NA
## 33      1 0.04430621 0.03623036      NaN 0.02799885 0.001400273         NA
## 34      1 0.04641589 0.03623036      NaN 0.02799885 0.001400273         NA
## 35      1 0.04862602 0.03623036      NaN 0.02799885 0.001400273         NA
## 36      1 0.05094138 0.03623036      NaN 0.02799885 0.001400273         NA
## 37      1 0.05336699 0.03623036      NaN 0.02799885 0.001400273         NA
## 38      1 0.05590810 0.03623036      NaN 0.02799885 0.001400273         NA
## 39      1 0.05857021 0.03623036      NaN 0.02799885 0.001400273         NA
## 40      1 0.06135907 0.03623036      NaN 0.02799885 0.001400273         NA
## 41      1 0.06428073 0.03623036      NaN 0.02799885 0.001400273         NA
## 42      1 0.06734151 0.03623036      NaN 0.02799885 0.001400273         NA
## 43      1 0.07054802 0.03623036      NaN 0.02799885 0.001400273         NA
## 44      1 0.07390722 0.03623036      NaN 0.02799885 0.001400273         NA
## 45      1 0.07742637 0.03623036      NaN 0.02799885 0.001400273         NA
## 46      1 0.08111308 0.03623036      NaN 0.02799885 0.001400273         NA
## 47      1 0.08497534 0.03623036      NaN 0.02799885 0.001400273         NA
## 48      1 0.08902151 0.03623036      NaN 0.02799885 0.001400273         NA
## 49      1 0.09326033 0.03623036      NaN 0.02799885 0.001400273         NA
## 50      1 0.09770100 0.03623036      NaN 0.02799885 0.001400273         NA
## 51      1 0.10235310 0.03623036      NaN 0.02799885 0.001400273         NA
## 52      1 0.10722672 0.03623036      NaN 0.02799885 0.001400273         NA
## 53      1 0.11233240 0.03623036      NaN 0.02799885 0.001400273         NA
## 54      1 0.11768120 0.03623036      NaN 0.02799885 0.001400273         NA
## 55      1 0.12328467 0.03623036      NaN 0.02799885 0.001400273         NA
## 56      1 0.12915497 0.03623036      NaN 0.02799885 0.001400273         NA
## 57      1 0.13530478 0.03623036      NaN 0.02799885 0.001400273         NA
## 58      1 0.14174742 0.03623036      NaN 0.02799885 0.001400273         NA
## 59      1 0.14849683 0.03623036      NaN 0.02799885 0.001400273         NA
## 60      1 0.15556761 0.03623036      NaN 0.02799885 0.001400273         NA
## 61      1 0.16297508 0.03623036      NaN 0.02799885 0.001400273         NA
## 62      1 0.17073526 0.03623036      NaN 0.02799885 0.001400273         NA
## 63      1 0.17886495 0.03623036      NaN 0.02799885 0.001400273         NA
## 64      1 0.18738174 0.03623036      NaN 0.02799885 0.001400273         NA
## 65      1 0.19630407 0.03623036      NaN 0.02799885 0.001400273         NA
## 66      1 0.20565123 0.03623036      NaN 0.02799885 0.001400273         NA
## 67      1 0.21544347 0.03623036      NaN 0.02799885 0.001400273         NA
## 68      1 0.22570197 0.03623036      NaN 0.02799885 0.001400273         NA
## 69      1 0.23644894 0.03623036      NaN 0.02799885 0.001400273         NA
## 70      1 0.24770764 0.03623036      NaN 0.02799885 0.001400273         NA
## 71      1 0.25950242 0.03623036      NaN 0.02799885 0.001400273         NA
## 72      1 0.27185882 0.03623036      NaN 0.02799885 0.001400273         NA
## 73      1 0.28480359 0.03623036      NaN 0.02799885 0.001400273         NA
## 74      1 0.29836472 0.03623036      NaN 0.02799885 0.001400273         NA
## 75      1 0.31257158 0.03623036      NaN 0.02799885 0.001400273         NA
## 76      1 0.32745492 0.03623036      NaN 0.02799885 0.001400273         NA
## 77      1 0.34304693 0.03623036      NaN 0.02799885 0.001400273         NA
## 78      1 0.35938137 0.03623036      NaN 0.02799885 0.001400273         NA
## 79      1 0.37649358 0.03623036      NaN 0.02799885 0.001400273         NA
## 80      1 0.39442061 0.03623036      NaN 0.02799885 0.001400273         NA
## 81      1 0.41320124 0.03623036      NaN 0.02799885 0.001400273         NA
## 82      1 0.43287613 0.03623036      NaN 0.02799885 0.001400273         NA
## 83      1 0.45348785 0.03623036      NaN 0.02799885 0.001400273         NA
## 84      1 0.47508102 0.03623036      NaN 0.02799885 0.001400273         NA
## 85      1 0.49770236 0.03623036      NaN 0.02799885 0.001400273         NA
## 86      1 0.52140083 0.03623036      NaN 0.02799885 0.001400273         NA
## 87      1 0.54622772 0.03623036      NaN 0.02799885 0.001400273         NA
## 88      1 0.57223677 0.03623036      NaN 0.02799885 0.001400273         NA
## 89      1 0.59948425 0.03623036      NaN 0.02799885 0.001400273         NA
## 90      1 0.62802914 0.03623036      NaN 0.02799885 0.001400273         NA
## 91      1 0.65793322 0.03623036      NaN 0.02799885 0.001400273         NA
## 92      1 0.68926121 0.03623036      NaN 0.02799885 0.001400273         NA
## 93      1 0.72208090 0.03623036      NaN 0.02799885 0.001400273         NA
## 94      1 0.75646333 0.03623036      NaN 0.02799885 0.001400273         NA
## 95      1 0.79248290 0.03623036      NaN 0.02799885 0.001400273         NA
## 96      1 0.83021757 0.03623036      NaN 0.02799885 0.001400273         NA
## 97      1 0.86974900 0.03623036      NaN 0.02799885 0.001400273         NA
## 98      1 0.91116276 0.03623036      NaN 0.02799885 0.001400273         NA
## 99      1 0.95454846 0.03623036      NaN 0.02799885 0.001400273         NA
## 100     1 1.00000000 0.03623036      NaN 0.02799885 0.001400273         NA
##            MAESD
## 1   0.0006664622
## 2   0.0006771143
## 3   0.0006882489
## 4   0.0007001831
## 5   0.0007105293
## 6   0.0006972770
## 7   0.0006972770
## 8   0.0006972770
## 9   0.0006972770
## 10  0.0006972770
## 11  0.0006972770
## 12  0.0006972770
## 13  0.0006972770
## 14  0.0006972770
## 15  0.0006972770
## 16  0.0006972770
## 17  0.0006972770
## 18  0.0006972770
## 19  0.0006972770
## 20  0.0006972770
## 21  0.0006972770
## 22  0.0006972770
## 23  0.0006972770
## 24  0.0006972770
## 25  0.0006972770
## 26  0.0006972770
## 27  0.0006972770
## 28  0.0006972770
## 29  0.0006972770
## 30  0.0006972770
## 31  0.0006972770
## 32  0.0006972770
## 33  0.0006972770
## 34  0.0006972770
## 35  0.0006972770
## 36  0.0006972770
## 37  0.0006972770
## 38  0.0006972770
## 39  0.0006972770
## 40  0.0006972770
## 41  0.0006972770
## 42  0.0006972770
## 43  0.0006972770
## 44  0.0006972770
## 45  0.0006972770
## 46  0.0006972770
## 47  0.0006972770
## 48  0.0006972770
## 49  0.0006972770
## 50  0.0006972770
## 51  0.0006972770
## 52  0.0006972770
## 53  0.0006972770
## 54  0.0006972770
## 55  0.0006972770
## 56  0.0006972770
## 57  0.0006972770
## 58  0.0006972770
## 59  0.0006972770
## 60  0.0006972770
## 61  0.0006972770
## 62  0.0006972770
## 63  0.0006972770
## 64  0.0006972770
## 65  0.0006972770
## 66  0.0006972770
## 67  0.0006972770
## 68  0.0006972770
## 69  0.0006972770
## 70  0.0006972770
## 71  0.0006972770
## 72  0.0006972770
## 73  0.0006972770
## 74  0.0006972770
## 75  0.0006972770
## 76  0.0006972770
## 77  0.0006972770
## 78  0.0006972770
## 79  0.0006972770
## 80  0.0006972770
## 81  0.0006972770
## 82  0.0006972770
## 83  0.0006972770
## 84  0.0006972770
## 85  0.0006972770
## 86  0.0006972770
## 87  0.0006972770
## 88  0.0006972770
## 89  0.0006972770
## 90  0.0006972770
## 91  0.0006972770
## 92  0.0006972770
## 93  0.0006972770
## 94  0.0006972770
## 95  0.0006972770
## 96  0.0006972770
## 97  0.0006972770
## 98  0.0006972770
## 99  0.0006972770
## 100 0.0006972770
## Warning: Removed 95 rows containing missing values (geom_path).
## Warning: Removed 95 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the CV-tuned glmnet LASSO model on the held-out test set.
# isTRUE() is preferred over `== TRUE` for flag checks: it is FALSE (not NA)
# when the flag is NA/NULL, so the chunk is safely skipped instead of erroring.
# NOTE(review): `transformation = t` presumably refers to a transformation
# flag set earlier in the document, not base::t — confirm upstream.
if (isTRUE(algo.LASSO.caret)) {
  test.model(model.LASSO.caret, data.test
             ,method = 'glmnet', subopt = "LASSO"
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.092   2.094   2.096   2.096   2.098   2.100 
## [1] "glmnet LASSO Test MSE: 0.00127010962901025"

LASSO with CV (w/ filtered train)

Train

# Train a LASSO (glmnet, alpha = 1) model with caret cross-validation on the
# filtered training set (data.train2), storing the fitted model for the
# subsequent test chunk. Guarded by the algo.LASSO.caret report parameter.
if (isTRUE(algo.LASSO.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(formula = formula
                                    ,data = data.train2
                                    ,method = "glmnet"
                                    ,subopt = 'LASSO'
                                    ,feature.names = feature.names)
  model.LASSO.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting alpha = 1, lambda = 0.01 on full training set
## glmnet 
## 
## 5708 samples
##  240 predictor
## 
## No pre-processing
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5137, 5137, 5137, 5137, 5136, 5137, ... 
## Resampling results across tuning parameters:
## 
##   lambda      RMSE        Rsquared   MAE       
##   0.01000000  0.03056302  0.1564097  0.02441058
##   0.01047616  0.03072207  0.1564097  0.02451832
##   0.01097499  0.03089567  0.1564097  0.02463707
##   0.01149757  0.03108508  0.1564097  0.02476805
##   0.01204504  0.03129164  0.1564097  0.02491344
##   0.01261857  0.03141724        NaN  0.02500165
##   0.01321941  0.03141724        NaN  0.02500165
##   0.01384886  0.03141724        NaN  0.02500165
##   0.01450829  0.03141724        NaN  0.02500165
##   0.01519911  0.03141724        NaN  0.02500165
##   0.01592283  0.03141724        NaN  0.02500165
##   0.01668101  0.03141724        NaN  0.02500165
##   0.01747528  0.03141724        NaN  0.02500165
##   0.01830738  0.03141724        NaN  0.02500165
##   0.01917910  0.03141724        NaN  0.02500165
##   0.02009233  0.03141724        NaN  0.02500165
##   0.02104904  0.03141724        NaN  0.02500165
##   0.02205131  0.03141724        NaN  0.02500165
##   0.02310130  0.03141724        NaN  0.02500165
##   0.02420128  0.03141724        NaN  0.02500165
##   0.02535364  0.03141724        NaN  0.02500165
##   0.02656088  0.03141724        NaN  0.02500165
##   0.02782559  0.03141724        NaN  0.02500165
##   0.02915053  0.03141724        NaN  0.02500165
##   0.03053856  0.03141724        NaN  0.02500165
##   0.03199267  0.03141724        NaN  0.02500165
##   0.03351603  0.03141724        NaN  0.02500165
##   0.03511192  0.03141724        NaN  0.02500165
##   0.03678380  0.03141724        NaN  0.02500165
##   0.03853529  0.03141724        NaN  0.02500165
##   0.04037017  0.03141724        NaN  0.02500165
##   0.04229243  0.03141724        NaN  0.02500165
##   0.04430621  0.03141724        NaN  0.02500165
##   0.04641589  0.03141724        NaN  0.02500165
##   0.04862602  0.03141724        NaN  0.02500165
##   0.05094138  0.03141724        NaN  0.02500165
##   0.05336699  0.03141724        NaN  0.02500165
##   0.05590810  0.03141724        NaN  0.02500165
##   0.05857021  0.03141724        NaN  0.02500165
##   0.06135907  0.03141724        NaN  0.02500165
##   0.06428073  0.03141724        NaN  0.02500165
##   0.06734151  0.03141724        NaN  0.02500165
##   0.07054802  0.03141724        NaN  0.02500165
##   0.07390722  0.03141724        NaN  0.02500165
##   0.07742637  0.03141724        NaN  0.02500165
##   0.08111308  0.03141724        NaN  0.02500165
##   0.08497534  0.03141724        NaN  0.02500165
##   0.08902151  0.03141724        NaN  0.02500165
##   0.09326033  0.03141724        NaN  0.02500165
##   0.09770100  0.03141724        NaN  0.02500165
##   0.10235310  0.03141724        NaN  0.02500165
##   0.10722672  0.03141724        NaN  0.02500165
##   0.11233240  0.03141724        NaN  0.02500165
##   0.11768120  0.03141724        NaN  0.02500165
##   0.12328467  0.03141724        NaN  0.02500165
##   0.12915497  0.03141724        NaN  0.02500165
##   0.13530478  0.03141724        NaN  0.02500165
##   0.14174742  0.03141724        NaN  0.02500165
##   0.14849683  0.03141724        NaN  0.02500165
##   0.15556761  0.03141724        NaN  0.02500165
##   0.16297508  0.03141724        NaN  0.02500165
##   0.17073526  0.03141724        NaN  0.02500165
##   0.17886495  0.03141724        NaN  0.02500165
##   0.18738174  0.03141724        NaN  0.02500165
##   0.19630407  0.03141724        NaN  0.02500165
##   0.20565123  0.03141724        NaN  0.02500165
##   0.21544347  0.03141724        NaN  0.02500165
##   0.22570197  0.03141724        NaN  0.02500165
##   0.23644894  0.03141724        NaN  0.02500165
##   0.24770764  0.03141724        NaN  0.02500165
##   0.25950242  0.03141724        NaN  0.02500165
##   0.27185882  0.03141724        NaN  0.02500165
##   0.28480359  0.03141724        NaN  0.02500165
##   0.29836472  0.03141724        NaN  0.02500165
##   0.31257158  0.03141724        NaN  0.02500165
##   0.32745492  0.03141724        NaN  0.02500165
##   0.34304693  0.03141724        NaN  0.02500165
##   0.35938137  0.03141724        NaN  0.02500165
##   0.37649358  0.03141724        NaN  0.02500165
##   0.39442061  0.03141724        NaN  0.02500165
##   0.41320124  0.03141724        NaN  0.02500165
##   0.43287613  0.03141724        NaN  0.02500165
##   0.45348785  0.03141724        NaN  0.02500165
##   0.47508102  0.03141724        NaN  0.02500165
##   0.49770236  0.03141724        NaN  0.02500165
##   0.52140083  0.03141724        NaN  0.02500165
##   0.54622772  0.03141724        NaN  0.02500165
##   0.57223677  0.03141724        NaN  0.02500165
##   0.59948425  0.03141724        NaN  0.02500165
##   0.62802914  0.03141724        NaN  0.02500165
##   0.65793322  0.03141724        NaN  0.02500165
##   0.68926121  0.03141724        NaN  0.02500165
##   0.72208090  0.03141724        NaN  0.02500165
##   0.75646333  0.03141724        NaN  0.02500165
##   0.79248290  0.03141724        NaN  0.02500165
##   0.83021757  0.03141724        NaN  0.02500165
##   0.86974900  0.03141724        NaN  0.02500165
##   0.91116276  0.03141724        NaN  0.02500165
##   0.95454846  0.03141724        NaN  0.02500165
##   1.00000000  0.03141724        NaN  0.02500165
## 
## Tuning parameter 'alpha' was held constant at a value of 1
## RMSE was used to select the optimal model using the smallest value.
## The final values used for the model were alpha = 1 and lambda = 0.01.

##   alpha lambda
## 1     1   0.01
##     alpha     lambda       RMSE  Rsquared        MAE       RMSESD
## 1       1 0.01000000 0.03056302 0.1564097 0.02441058 0.0005568878
## 2       1 0.01047616 0.03072207 0.1564097 0.02451832 0.0005602007
## 3       1 0.01097499 0.03089567 0.1564097 0.02463707 0.0005643146
## 4       1 0.01149757 0.03108508 0.1564097 0.02476805 0.0005693060
## 5       1 0.01204504 0.03129164 0.1564097 0.02491344 0.0005752517
## 6       1 0.01261857 0.03141724       NaN 0.02500165 0.0005633091
## 7       1 0.01321941 0.03141724       NaN 0.02500165 0.0005633091
## 8       1 0.01384886 0.03141724       NaN 0.02500165 0.0005633091
## 9       1 0.01450829 0.03141724       NaN 0.02500165 0.0005633091
## 10      1 0.01519911 0.03141724       NaN 0.02500165 0.0005633091
## 11      1 0.01592283 0.03141724       NaN 0.02500165 0.0005633091
## 12      1 0.01668101 0.03141724       NaN 0.02500165 0.0005633091
## 13      1 0.01747528 0.03141724       NaN 0.02500165 0.0005633091
## 14      1 0.01830738 0.03141724       NaN 0.02500165 0.0005633091
## 15      1 0.01917910 0.03141724       NaN 0.02500165 0.0005633091
## 16      1 0.02009233 0.03141724       NaN 0.02500165 0.0005633091
## 17      1 0.02104904 0.03141724       NaN 0.02500165 0.0005633091
## 18      1 0.02205131 0.03141724       NaN 0.02500165 0.0005633091
## 19      1 0.02310130 0.03141724       NaN 0.02500165 0.0005633091
## 20      1 0.02420128 0.03141724       NaN 0.02500165 0.0005633091
## 21      1 0.02535364 0.03141724       NaN 0.02500165 0.0005633091
## 22      1 0.02656088 0.03141724       NaN 0.02500165 0.0005633091
## 23      1 0.02782559 0.03141724       NaN 0.02500165 0.0005633091
## 24      1 0.02915053 0.03141724       NaN 0.02500165 0.0005633091
## 25      1 0.03053856 0.03141724       NaN 0.02500165 0.0005633091
## 26      1 0.03199267 0.03141724       NaN 0.02500165 0.0005633091
## 27      1 0.03351603 0.03141724       NaN 0.02500165 0.0005633091
## 28      1 0.03511192 0.03141724       NaN 0.02500165 0.0005633091
## 29      1 0.03678380 0.03141724       NaN 0.02500165 0.0005633091
## 30      1 0.03853529 0.03141724       NaN 0.02500165 0.0005633091
## 31      1 0.04037017 0.03141724       NaN 0.02500165 0.0005633091
## 32      1 0.04229243 0.03141724       NaN 0.02500165 0.0005633091
## 33      1 0.04430621 0.03141724       NaN 0.02500165 0.0005633091
## 34      1 0.04641589 0.03141724       NaN 0.02500165 0.0005633091
## 35      1 0.04862602 0.03141724       NaN 0.02500165 0.0005633091
## 36      1 0.05094138 0.03141724       NaN 0.02500165 0.0005633091
## 37      1 0.05336699 0.03141724       NaN 0.02500165 0.0005633091
## 38      1 0.05590810 0.03141724       NaN 0.02500165 0.0005633091
## 39      1 0.05857021 0.03141724       NaN 0.02500165 0.0005633091
## 40      1 0.06135907 0.03141724       NaN 0.02500165 0.0005633091
## 41      1 0.06428073 0.03141724       NaN 0.02500165 0.0005633091
## 42      1 0.06734151 0.03141724       NaN 0.02500165 0.0005633091
## 43      1 0.07054802 0.03141724       NaN 0.02500165 0.0005633091
## 44      1 0.07390722 0.03141724       NaN 0.02500165 0.0005633091
## 45      1 0.07742637 0.03141724       NaN 0.02500165 0.0005633091
## 46      1 0.08111308 0.03141724       NaN 0.02500165 0.0005633091
## 47      1 0.08497534 0.03141724       NaN 0.02500165 0.0005633091
## 48      1 0.08902151 0.03141724       NaN 0.02500165 0.0005633091
## 49      1 0.09326033 0.03141724       NaN 0.02500165 0.0005633091
## 50      1 0.09770100 0.03141724       NaN 0.02500165 0.0005633091
## 51      1 0.10235310 0.03141724       NaN 0.02500165 0.0005633091
## 52      1 0.10722672 0.03141724       NaN 0.02500165 0.0005633091
## 53      1 0.11233240 0.03141724       NaN 0.02500165 0.0005633091
## 54      1 0.11768120 0.03141724       NaN 0.02500165 0.0005633091
## 55      1 0.12328467 0.03141724       NaN 0.02500165 0.0005633091
## 56      1 0.12915497 0.03141724       NaN 0.02500165 0.0005633091
## 57      1 0.13530478 0.03141724       NaN 0.02500165 0.0005633091
## 58      1 0.14174742 0.03141724       NaN 0.02500165 0.0005633091
## 59      1 0.14849683 0.03141724       NaN 0.02500165 0.0005633091
## 60      1 0.15556761 0.03141724       NaN 0.02500165 0.0005633091
## 61      1 0.16297508 0.03141724       NaN 0.02500165 0.0005633091
## 62      1 0.17073526 0.03141724       NaN 0.02500165 0.0005633091
## 63      1 0.17886495 0.03141724       NaN 0.02500165 0.0005633091
## 64      1 0.18738174 0.03141724       NaN 0.02500165 0.0005633091
## 65      1 0.19630407 0.03141724       NaN 0.02500165 0.0005633091
## 66      1 0.20565123 0.03141724       NaN 0.02500165 0.0005633091
## 67      1 0.21544347 0.03141724       NaN 0.02500165 0.0005633091
## 68      1 0.22570197 0.03141724       NaN 0.02500165 0.0005633091
## 69      1 0.23644894 0.03141724       NaN 0.02500165 0.0005633091
## 70      1 0.24770764 0.03141724       NaN 0.02500165 0.0005633091
## 71      1 0.25950242 0.03141724       NaN 0.02500165 0.0005633091
## 72      1 0.27185882 0.03141724       NaN 0.02500165 0.0005633091
## 73      1 0.28480359 0.03141724       NaN 0.02500165 0.0005633091
## 74      1 0.29836472 0.03141724       NaN 0.02500165 0.0005633091
## 75      1 0.31257158 0.03141724       NaN 0.02500165 0.0005633091
## 76      1 0.32745492 0.03141724       NaN 0.02500165 0.0005633091
## 77      1 0.34304693 0.03141724       NaN 0.02500165 0.0005633091
## 78      1 0.35938137 0.03141724       NaN 0.02500165 0.0005633091
## 79      1 0.37649358 0.03141724       NaN 0.02500165 0.0005633091
## 80      1 0.39442061 0.03141724       NaN 0.02500165 0.0005633091
## 81      1 0.41320124 0.03141724       NaN 0.02500165 0.0005633091
## 82      1 0.43287613 0.03141724       NaN 0.02500165 0.0005633091
## 83      1 0.45348785 0.03141724       NaN 0.02500165 0.0005633091
## 84      1 0.47508102 0.03141724       NaN 0.02500165 0.0005633091
## 85      1 0.49770236 0.03141724       NaN 0.02500165 0.0005633091
## 86      1 0.52140083 0.03141724       NaN 0.02500165 0.0005633091
## 87      1 0.54622772 0.03141724       NaN 0.02500165 0.0005633091
## 88      1 0.57223677 0.03141724       NaN 0.02500165 0.0005633091
## 89      1 0.59948425 0.03141724       NaN 0.02500165 0.0005633091
## 90      1 0.62802914 0.03141724       NaN 0.02500165 0.0005633091
## 91      1 0.65793322 0.03141724       NaN 0.02500165 0.0005633091
## 92      1 0.68926121 0.03141724       NaN 0.02500165 0.0005633091
## 93      1 0.72208090 0.03141724       NaN 0.02500165 0.0005633091
## 94      1 0.75646333 0.03141724       NaN 0.02500165 0.0005633091
## 95      1 0.79248290 0.03141724       NaN 0.02500165 0.0005633091
## 96      1 0.83021757 0.03141724       NaN 0.02500165 0.0005633091
## 97      1 0.86974900 0.03141724       NaN 0.02500165 0.0005633091
## 98      1 0.91116276 0.03141724       NaN 0.02500165 0.0005633091
## 99      1 0.95454846 0.03141724       NaN 0.02500165 0.0005633091
## 100     1 1.00000000 0.03141724       NaN 0.02500165 0.0005633091
##     RsquaredSD        MAESD
## 1   0.02633141 0.0003830351
## 2   0.02633141 0.0003827229
## 3   0.02633141 0.0003822826
## 4   0.02633141 0.0003839783
## 5   0.02633141 0.0003856794
## 6           NA 0.0003743830
## 7           NA 0.0003743830
## 8           NA 0.0003743830
## 9           NA 0.0003743830
## 10          NA 0.0003743830
## 11          NA 0.0003743830
## 12          NA 0.0003743830
## 13          NA 0.0003743830
## 14          NA 0.0003743830
## 15          NA 0.0003743830
## 16          NA 0.0003743830
## 17          NA 0.0003743830
## 18          NA 0.0003743830
## 19          NA 0.0003743830
## 20          NA 0.0003743830
## 21          NA 0.0003743830
## 22          NA 0.0003743830
## 23          NA 0.0003743830
## 24          NA 0.0003743830
## 25          NA 0.0003743830
## 26          NA 0.0003743830
## 27          NA 0.0003743830
## 28          NA 0.0003743830
## 29          NA 0.0003743830
## 30          NA 0.0003743830
## 31          NA 0.0003743830
## 32          NA 0.0003743830
## 33          NA 0.0003743830
## 34          NA 0.0003743830
## 35          NA 0.0003743830
## 36          NA 0.0003743830
## 37          NA 0.0003743830
## 38          NA 0.0003743830
## 39          NA 0.0003743830
## 40          NA 0.0003743830
## 41          NA 0.0003743830
## 42          NA 0.0003743830
## 43          NA 0.0003743830
## 44          NA 0.0003743830
## 45          NA 0.0003743830
## 46          NA 0.0003743830
## 47          NA 0.0003743830
## 48          NA 0.0003743830
## 49          NA 0.0003743830
## 50          NA 0.0003743830
## 51          NA 0.0003743830
## 52          NA 0.0003743830
## 53          NA 0.0003743830
## 54          NA 0.0003743830
## 55          NA 0.0003743830
## 56          NA 0.0003743830
## 57          NA 0.0003743830
## 58          NA 0.0003743830
## 59          NA 0.0003743830
## 60          NA 0.0003743830
## 61          NA 0.0003743830
## 62          NA 0.0003743830
## 63          NA 0.0003743830
## 64          NA 0.0003743830
## 65          NA 0.0003743830
## 66          NA 0.0003743830
## 67          NA 0.0003743830
## 68          NA 0.0003743830
## 69          NA 0.0003743830
## 70          NA 0.0003743830
## 71          NA 0.0003743830
## 72          NA 0.0003743830
## 73          NA 0.0003743830
## 74          NA 0.0003743830
## 75          NA 0.0003743830
## 76          NA 0.0003743830
## 77          NA 0.0003743830
## 78          NA 0.0003743830
## 79          NA 0.0003743830
## 80          NA 0.0003743830
## 81          NA 0.0003743830
## 82          NA 0.0003743830
## 83          NA 0.0003743830
## 84          NA 0.0003743830
## 85          NA 0.0003743830
## 86          NA 0.0003743830
## 87          NA 0.0003743830
## 88          NA 0.0003743830
## 89          NA 0.0003743830
## 90          NA 0.0003743830
## 91          NA 0.0003743830
## 92          NA 0.0003743830
## 93          NA 0.0003743830
## 94          NA 0.0003743830
## 95          NA 0.0003743830
## 96          NA 0.0003743830
## 97          NA 0.0003743830
## 98          NA 0.0003743830
## 99          NA 0.0003743830
## 100         NA 0.0003743830
## Warning: Removed 95 rows containing missing values (geom_path).
## Warning: Removed 95 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the glmnet LASSO model (trained on the filtered data) on the
# held-out test set. isTRUE() handles an NA/NULL flag gracefully, unlike
# the `== TRUE` comparison it replaces.
# NOTE(review): `transformation = t` — presumably a flag defined earlier,
# not base::t; confirm upstream.
if (isTRUE(algo.LASSO.caret)) {
  test.model(model.LASSO.caret, data.test
             ,method = 'glmnet', subopt = "LASSO"
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,draw.limits = TRUE, transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.088   2.091   2.093   2.093   2.095   2.096 
## [1] "glmnet LASSO Test MSE: 0.00129681819581809"

LARS with CV (w/ full train)

Train

# Train a Least Angle Regression (lars) model with caret cross-validation on
# the full training set. Guarded by the algo.LARS.caret report parameter.
# NOTE(review): subopt = 'NULL' passes the *string* "NULL", while the matching
# test chunk passes the NULL object — confirm train.caret.glmselect treats
# the string as "no sub-option"; kept as-is here to preserve behavior.
if (isTRUE(algo.LARS.caret)) {
  set.seed(1)  # reproducible CV fold assignment
  returned <- train.caret.glmselect(formula = formula
                                    ,data = data.train
                                    ,method = "lars"
                                    ,subopt = 'NULL'
                                    ,feature.names = feature.names)
  model.LARS.caret <- returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.434 on full training set
## Least Angle Regression 
## 
## 6002 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5402, 5401, 5402, 5401, 5402, 5402, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE        Rsquared   MAE       
##   0.00000000  0.03623036        NaN  0.02799885
##   0.01010101  0.03579743  0.1149630  0.02768458
##   0.02020202  0.03541053  0.1149630  0.02741268
##   0.03030303  0.03507118  0.1149630  0.02718320
##   0.04040404  0.03478390  0.1236994  0.02698727
##   0.05050505  0.03451932  0.1367743  0.02679842
##   0.06060606  0.03427727  0.1500572  0.02661859
##   0.07070707  0.03405292  0.1619525  0.02644719
##   0.08080808  0.03383771  0.1730473  0.02627791
##   0.09090909  0.03363338  0.1819930  0.02611208
##   0.10101010  0.03344302  0.1890156  0.02595560
##   0.11111111  0.03326631  0.1955805  0.02581076
##   0.12121212  0.03309716  0.2020083  0.02567045
##   0.13131313  0.03293831  0.2072679  0.02553607
##   0.14141414  0.03278988  0.2115510  0.02540726
##   0.15151515  0.03265203  0.2150239  0.02528355
##   0.16161616  0.03252489  0.2178273  0.02516535
##   0.17171717  0.03240859  0.2200792  0.02505349
##   0.18181818  0.03230324  0.2218774  0.02494803
##   0.19191919  0.03221276  0.2233206  0.02485479
##   0.20202020  0.03213541  0.2247774  0.02477309
##   0.21212121  0.03206534  0.2263885  0.02469948
##   0.22222222  0.03200681  0.2278136  0.02463694
##   0.23232323  0.03195556  0.2290974  0.02458427
##   0.24242424  0.03191136  0.2303060  0.02454161
##   0.25252525  0.03187379  0.2313632  0.02450416
##   0.26262626  0.03184318  0.2322271  0.02447245
##   0.27272727  0.03181646  0.2330026  0.02444481
##   0.28282828  0.03179321  0.2337110  0.02442083
##   0.29292929  0.03177256  0.2343564  0.02440039
##   0.30303030  0.03175505  0.2349000  0.02438262
##   0.31313131  0.03174090  0.2353169  0.02436704
##   0.32323232  0.03173000  0.2356047  0.02435416
##   0.33333333  0.03172172  0.2357822  0.02434397
##   0.34343434  0.03171588  0.2358547  0.02433594
##   0.35353535  0.03171146  0.2358763  0.02432924
##   0.36363636  0.03170816  0.2358617  0.02432340
##   0.37373737  0.03170562  0.2358296  0.02431845
##   0.38383838  0.03170297  0.2358274  0.02431402
##   0.39393939  0.03170046  0.2358354  0.02430997
##   0.40404040  0.03169876  0.2358156  0.02430620
##   0.41414141  0.03169704  0.2358114  0.02430256
##   0.42424242  0.03169583  0.2357894  0.02429971
##   0.43434343  0.03169527  0.2357443  0.02429779
##   0.44444444  0.03169568  0.2356592  0.02429647
##   0.45454545  0.03169740  0.2355153  0.02429626
##   0.46464646  0.03170015  0.2353268  0.02429664
##   0.47474747  0.03170361  0.2351097  0.02429804
##   0.48484848  0.03170764  0.2348697  0.02430004
##   0.49494949  0.03171185  0.2346274  0.02430255
##   0.50505051  0.03171664  0.2343603  0.02430561
##   0.51515152  0.03172177  0.2340838  0.02430875
##   0.52525253  0.03172673  0.2338228  0.02431190
##   0.53535354  0.03173220  0.2335435  0.02431543
##   0.54545455  0.03173785  0.2332618  0.02431910
##   0.55555556  0.03174404  0.2329588  0.02432339
##   0.56565657  0.03175035  0.2326565  0.02432757
##   0.57575758  0.03175676  0.2323548  0.02433184
##   0.58585859  0.03176368  0.2320336  0.02433640
##   0.59595960  0.03177098  0.2316985  0.02434107
##   0.60606061  0.03177885  0.2313418  0.02434603
##   0.61616162  0.03178695  0.2309790  0.02435112
##   0.62626263  0.03179541  0.2306042  0.02435629
##   0.63636364  0.03180435  0.2302119  0.02436191
##   0.64646465  0.03181337  0.2298207  0.02436795
##   0.65656566  0.03182257  0.2294261  0.02437436
##   0.66666667  0.03183176  0.2290361  0.02438065
##   0.67676768  0.03184096  0.2286505  0.02438686
##   0.68686869  0.03185028  0.2282648  0.02439335
##   0.69696970  0.03185987  0.2278721  0.02440016
##   0.70707071  0.03186958  0.2274789  0.02440694
##   0.71717172  0.03187956  0.2270778  0.02441389
##   0.72727273  0.03188996  0.2266624  0.02442126
##   0.73737374  0.03190077  0.2262321  0.02442902
##   0.74747475  0.03191201  0.2257872  0.02443689
##   0.75757576  0.03192330  0.2253447  0.02444471
##   0.76767677  0.03193465  0.2249038  0.02445271
##   0.77777778  0.03194609  0.2244622  0.02446107
##   0.78787879  0.03195762  0.2240210  0.02446972
##   0.79797980  0.03196908  0.2235864  0.02447835
##   0.80808081  0.03198079  0.2231445  0.02448700
##   0.81818182  0.03199264  0.2227009  0.02449577
##   0.82828283  0.03200476  0.2222505  0.02450493
##   0.83838384  0.03201697  0.2218007  0.02451408
##   0.84848485  0.03202936  0.2213467  0.02452347
##   0.85858586  0.03204189  0.2208906  0.02453321
##   0.86868687  0.03205457  0.2204319  0.02454308
##   0.87878788  0.03206742  0.2199696  0.02455319
##   0.88888889  0.03208058  0.2194985  0.02456366
##   0.89898990  0.03209401  0.2190194  0.02457462
##   0.90909091  0.03210769  0.2185344  0.02458598
##   0.91919192  0.03212147  0.2180491  0.02459737
##   0.92929293  0.03213524  0.2175685  0.02460890
##   0.93939394  0.03214921  0.2170836  0.02462059
##   0.94949495  0.03216307  0.2166081  0.02463222
##   0.95959596  0.03217703  0.2161318  0.02464407
##   0.96969697  0.03219113  0.2156541  0.02465605
##   0.97979798  0.03220534  0.2151757  0.02466823
##   0.98989899  0.03221964  0.2146986  0.02468060
##   1.00000000  0.03223413  0.2142186  0.02469321
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.4343434.

##     fraction
## 44 0.4343434
## Warning: Removed 1 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the cross-validated LARS model on the held-out test set.
# test.model() prints a summary of the predicted values and the test MSE
# (see captured output below). isTRUE() replaces the `== TRUE` comparison:
# it is the idiomatic scalar check and is robust to NA/length-0 flags.
if (isTRUE(algo.LARS.caret)) {
  test.model(model.LARS.caret, data.test
             ,method = 'lars', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,draw.limits = TRUE
             # NOTE(review): unless a variable `t` is defined earlier in the
             # document, `t` here is base R's transpose function -- confirm
             # this is the intended transformation argument.
             ,transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.040   2.085   2.097   2.096   2.108   2.140 
## [1] "lars  Test MSE: 0.001037729747415"

LARS with cross-validation (trained on the filtered training set)

Train

# Train a LARS model via caret with 10-fold cross-validation on the
# filtered training set (data.train2). set.seed(1) makes the CV fold
# assignment reproducible. isTRUE() replaces the `== TRUE` comparison.
if (isTRUE(algo.LARS.caret)) {
  set.seed(1)
  returned = train.caret.glmselect(formula = formula
                                   ,data =  data.train2
                                   ,method = "lars"
                                   # Bug fix: was the *string* 'NULL'; both
                                   # test.model() call sites pass the NULL
                                   # object, so the quoted form was a typo.
                                   ,subopt = NULL
                                   ,feature.names = feature.names)
  model.LARS.caret = returned$model
}
## Warning in nominalTrainWorkflow(x = x, y = y, wts = weights, info =
## trainInfo, : There were missing values in resampled performance measures.
## Aggregating results
## Selecting tuning parameters
## Fitting fraction = 0.576 on full training set
## Least Angle Regression 
## 
## 5708 samples
##  240 predictor
## 
## Pre-processing: centered (240), scaled (240) 
## Resampling: Cross-Validated (10 fold) 
## Summary of sample sizes: 5137, 5137, 5137, 5137, 5136, 5137, ... 
## Resampling results across tuning parameters:
## 
##   fraction    RMSE        Rsquared   MAE       
##   0.00000000  0.03141724        NaN  0.02500165
##   0.01010101  0.03090891  0.1564097  0.02464644
##   0.02020202  0.03045223  0.1564097  0.02433659
##   0.03030303  0.03005282  0.1587862  0.02406843
##   0.04040404  0.02969326  0.1835517  0.02382605
##   0.05050505  0.02935810  0.1996310  0.02360553
##   0.06060606  0.02905377  0.2099558  0.02340730
##   0.07070707  0.02877609  0.2243843  0.02321486
##   0.08080808  0.02850942  0.2400910  0.02301936
##   0.09090909  0.02825560  0.2528172  0.02283019
##   0.10101010  0.02801641  0.2628044  0.02264939
##   0.11111111  0.02779222  0.2706002  0.02247677
##   0.12121212  0.02758682  0.2774220  0.02231724
##   0.13131313  0.02739187  0.2845787  0.02216981
##   0.14141414  0.02720841  0.2906459  0.02203074
##   0.15151515  0.02703759  0.2955867  0.02190019
##   0.16161616  0.02687965  0.2995901  0.02177678
##   0.17171717  0.02673482  0.3028159  0.02165958
##   0.18181818  0.02660424  0.3054709  0.02155058
##   0.19191919  0.02648995  0.3080524  0.02145478
##   0.20202020  0.02638646  0.3109921  0.02137120
##   0.21212121  0.02629158  0.3139147  0.02129357
##   0.22222222  0.02620665  0.3165308  0.02122365
##   0.23232323  0.02613041  0.3190042  0.02116028
##   0.24242424  0.02605849  0.3215870  0.02110524
##   0.25252525  0.02599529  0.3237749  0.02105531
##   0.26262626  0.02594032  0.3256689  0.02101201
##   0.27272727  0.02589192  0.3273732  0.02097417
##   0.28282828  0.02585070  0.3288548  0.02094309
##   0.29292929  0.02581569  0.3300814  0.02091573
##   0.30303030  0.02578552  0.3311309  0.02089133
##   0.31313131  0.02576011  0.3320245  0.02087101
##   0.32323232  0.02573623  0.3328991  0.02085165
##   0.33333333  0.02571368  0.3337491  0.02083257
##   0.34343434  0.02569362  0.3345189  0.02081505
##   0.35353535  0.02567563  0.3352140  0.02079965
##   0.36363636  0.02565917  0.3358478  0.02078532
##   0.37373737  0.02564462  0.3363972  0.02077268
##   0.38383838  0.02563228  0.3368462  0.02076240
##   0.39393939  0.02562149  0.3372304  0.02075367
##   0.40404040  0.02561185  0.3375640  0.02074621
##   0.41414141  0.02560318  0.3378600  0.02073943
##   0.42424242  0.02559480  0.3381522  0.02073299
##   0.43434343  0.02558702  0.3384243  0.02072696
##   0.44444444  0.02558063  0.3386339  0.02072179
##   0.45454545  0.02557469  0.3388311  0.02071715
##   0.46464646  0.02556970  0.3389864  0.02071347
##   0.47474747  0.02556515  0.3391270  0.02071059
##   0.48484848  0.02556176  0.3392129  0.02070879
##   0.49494949  0.02555881  0.3392811  0.02070711
##   0.50505051  0.02555635  0.3393297  0.02070540
##   0.51515152  0.02555427  0.3393633  0.02070407
##   0.52525253  0.02555263  0.3393800  0.02070305
##   0.53535354  0.02555124  0.3393895  0.02070216
##   0.54545455  0.02555033  0.3393789  0.02070161
##   0.55555556  0.02554975  0.3393552  0.02070121
##   0.56565657  0.02554952  0.3393180  0.02070107
##   0.57575758  0.02554936  0.3392814  0.02070072
##   0.58585859  0.02554947  0.3392359  0.02070032
##   0.59595960  0.02555015  0.3391648  0.02070034
##   0.60606061  0.02555156  0.3390597  0.02070091
##   0.61616162  0.02555339  0.3389376  0.02070168
##   0.62626263  0.02555544  0.3388095  0.02070235
##   0.63636364  0.02555764  0.3386775  0.02070312
##   0.64646465  0.02555966  0.3385604  0.02070392
##   0.65656566  0.02556149  0.3384573  0.02070454
##   0.66666667  0.02556376  0.3383354  0.02070563
##   0.67676768  0.02556669  0.3381832  0.02070741
##   0.68686869  0.02556991  0.3380205  0.02070944
##   0.69696970  0.02557313  0.3378616  0.02071132
##   0.70707071  0.02557623  0.3377128  0.02071319
##   0.71717172  0.02557965  0.3375514  0.02071543
##   0.72727273  0.02558352  0.3373715  0.02071801
##   0.73737374  0.02558785  0.3371726  0.02072102
##   0.74747475  0.02559245  0.3369636  0.02072420
##   0.75757576  0.02559745  0.3367387  0.02072781
##   0.76767677  0.02560274  0.3365032  0.02073165
##   0.77777778  0.02560825  0.3362610  0.02073559
##   0.78787879  0.02561389  0.3360173  0.02073949
##   0.79797980  0.02561982  0.3357634  0.02074389
##   0.80808081  0.02562608  0.3354967  0.02074863
##   0.81818182  0.02563260  0.3352210  0.02075366
##   0.82828283  0.02563942  0.3349341  0.02075877
##   0.83838384  0.02564671  0.3346279  0.02076413
##   0.84848485  0.02565420  0.3343162  0.02076960
##   0.85858586  0.02566213  0.3339871  0.02077547
##   0.86868687  0.02567050  0.3336414  0.02078187
##   0.87878788  0.02567924  0.3332814  0.02078844
##   0.88888889  0.02568823  0.3329142  0.02079511
##   0.89898990  0.02569765  0.3325299  0.02080207
##   0.90909091  0.02570747  0.3321304  0.02080927
##   0.91919192  0.02571755  0.3317228  0.02081662
##   0.92929293  0.02572772  0.3313161  0.02082415
##   0.93939394  0.02573805  0.3309058  0.02083194
##   0.94949495  0.02574861  0.3304895  0.02084005
##   0.95959596  0.02575925  0.3300741  0.02084834
##   0.96969697  0.02577004  0.3296557  0.02085677
##   0.97979798  0.02578093  0.3292381  0.02086526
##   0.98989899  0.02579204  0.3288153  0.02087404
##   1.00000000  0.02580348  0.3283813  0.02088310
## 
## RMSE was used to select the optimal model using the smallest value.
## The final value used for the model was fraction = 0.5757576.

##     fraction
## 58 0.5757576
## Warning: Removed 1 rows containing missing values (geom_point).

## `geom_smooth()` using method = 'gam' and formula 'y ~ s(x, bs = "cs")'

## `stat_bin()` using `bins = 30`. Pick better value with `binwidth`.

Test

# Evaluate the LARS model trained on the filtered training set against the
# same held-out test set; test.model() prints the prediction summary and
# test MSE. isTRUE() replaces the `== TRUE` comparison (idiomatic scalar
# check, robust to NA/length-0 flags).
if (isTRUE(algo.LARS.caret)) {
  test.model(model.LARS.caret, data.test
             ,method = 'lars', subopt = NULL
             ,formula = formula, feature.names = feature.names, label.names = label.names
             ,draw.limits = TRUE
             # NOTE(review): `t` is base R's transpose function unless a
             # transformation object named `t` was assigned earlier -- confirm.
             ,transformation = t)
}
## [1] "Summary of predicted values: "
##    Min. 1st Qu.  Median    Mean 3rd Qu.    Max. 
##   2.031   2.081   2.093   2.093   2.105   2.140 
## [1] "lars  Test MSE: 0.00106433541121342"